Merge branch 'feature/UpdateAttr' into dev

JackDandy 2023-02-09 14:34:29 +00:00
commit 765bc9689e
21 changed files with 1783 additions and 845 deletions

CHANGES.md

@ -1,8 +1,9 @@
### 3.27.0 (202x-xx-xx xx:xx:00 UTC)
* Update Cachecontrol 0.12.6 (167a605) to 0.12.11 (c05ef9e)
* Add Filelock 3.9.0 (ce3e891)
* Remove Lockfile no longer used by Cachecontrol
* Update attr 20.3.0 (f3762ba) to 22.2.0 (a9960de)
* Update cachecontrol 0.12.6 (167a605) to 0.12.11 (c05ef9e)
* Add filelock 3.9.0 (ce3e891)
* Remove lockfile no longer used by cachecontrol
* Update Msgpack 1.0.0 (fa7d744) to 1.0.4 (b5acfd5)
* Update diskcache 5.1.0 (40ce0de) to 5.4.0 (1cb1425)
* Update Rarfile 4.0 (55fe778) to 4.1a1 (8a72967)

lib/attr/__init__.py

@ -1,10 +1,12 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
import sys
import warnings
from functools import partial
from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
@ -18,10 +20,19 @@ from ._make import (
make_class,
validate,
)
from ._next_gen import define, field, frozen, mutable
from ._version_info import VersionInfo
__version__ = "20.3.0"
if sys.version_info < (3, 7): # pragma: no cover
warnings.warn(
"Running attrs on Python 3.6 is deprecated & we intend to drop "
"support soon. If that's a problem for you, please let us know why & "
"we MAY re-evaluate: <https://github.com/python-attrs/attrs/pull/993>",
DeprecationWarning,
)
__version__ = "22.2.0"
__version_info__ = VersionInfo._from_version_string(__version__)
__title__ = "attrs"
@ -41,8 +52,14 @@ s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
class AttrsInstance:
pass
__all__ = [
"Attribute",
"AttrsInstance",
"Factory",
"NOTHING",
"asdict",
@ -52,16 +69,21 @@ __all__ = [
"attrib",
"attributes",
"attrs",
"cmp_using",
"converters",
"define",
"evolve",
"exceptions",
"field",
"fields",
"fields_dict",
"filters",
"frozen",
"get_run_validators",
"has",
"ib",
"make_class",
"mutable",
"resolve_types",
"s",
"set_run_validators",
@ -69,8 +91,3 @@ __all__ = [
"validate",
"validators",
]
if sys.version_info[:2] >= (3, 6):
from ._next_gen import define, field, frozen, mutable
__all__.extend((define, field, frozen, mutable))

lib/attr/__init__.pyi

@ -1,12 +1,16 @@
import enum
import sys
from typing import (
Any,
Callable,
Dict,
Generic,
List,
Optional,
Sequence,
Mapping,
Optional,
Protocol,
Sequence,
Tuple,
Type,
TypeVar,
@ -15,14 +19,20 @@ from typing import (
)
# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import converters as converters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._typing_compat import AttrsInstance_
from ._version_info import VersionInfo
if sys.version_info >= (3, 10):
from typing import TypeGuard
else:
from typing_extensions import TypeGuard
__version__: str
__version_info__: VersionInfo
__title__: str
@ -37,44 +47,86 @@ __copyright__: str
_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_EqOrderType = Union[bool, Callable[[Any], Any]]
_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
_ConverterType = Callable[[Any], Any]
_FilterType = Callable[[Attribute[_T], _T], bool]
_FilterType = Callable[["Attribute[_T]", _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
_OnSetAttrArgType = Union[
_OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
_FieldTransformer = Callable[[type, List[Attribute]], List[Attribute]]
_FieldTransformer = Callable[
[type, List["Attribute[Any]"]], List["Attribute[Any]"]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
# We subclass this here to keep the protocol's qualified name clean.
class AttrsInstance(AttrsInstance_, Protocol):
pass
# _make --
NOTHING: object
class _Nothing(enum.Enum):
NOTHING = enum.auto()
NOTHING = _Nothing.NOTHING
# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
if sys.version_info >= (3, 8):
from typing import Literal
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Callable[[Any], _T],
takes_self: Literal[True],
) -> _T: ...
@overload
def Factory(
factory: Callable[[], _T],
takes_self: Literal[False],
) -> _T: ...
else:
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Union[Callable[[Any], _T], Callable[[], _T]],
takes_self: bool = ...,
) -> _T: ...
) -> _T: ...
# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...
class Attribute(Generic[_T]):
name: str
default: Optional[_T]
validator: Optional[_ValidatorType[_T]]
repr: _ReprArgType
cmp: bool
eq: bool
order: bool
cmp: _EqOrderType
eq: _EqOrderType
order: _EqOrderType
hash: Optional[bool]
init: bool
converter: Optional[_ConverterType]
@ -82,6 +134,9 @@ class Attribute(Generic[_T]):
type: Optional[Type[_T]]
kw_only: bool
on_setattr: _OnSetAttrType
alias: Optional[str]
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
@ -112,7 +167,7 @@ def attrib(
default: None = ...,
validator: None = ...,
repr: _ReprArgType = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
@ -120,9 +175,10 @@ def attrib(
converter: None = ...,
factory: None = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> Any: ...
# This form catches an explicit None or no default and infers the type from the
@ -132,7 +188,7 @@ def attrib(
default: None = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
@ -140,9 +196,10 @@ def attrib(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> _T: ...
# This form catches an explicit default argument.
@ -151,7 +208,7 @@ def attrib(
default: _T,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
@ -159,9 +216,10 @@ def attrib(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> _T: ...
# This form covers type=non-Type: e.g. forward references (str), Any
@ -170,7 +228,7 @@ def attrib(
default: Optional[_T] = ...,
validator: Optional[_ValidatorArgType[_T]] = ...,
repr: _ReprArgType = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
metadata: Optional[Mapping[Any, Any]] = ...,
@ -178,9 +236,10 @@ def attrib(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> Any: ...
@overload
def field(
@ -197,6 +256,7 @@ def field(
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> Any: ...
# This form catches an explicit None or no default and infers the type from the
@ -213,9 +273,10 @@ def field(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> _T: ...
# This form catches an explicit default argument.
@ -231,9 +292,10 @@ def field(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> _T: ...
# This form covers type=non-Type: e.g. forward references (str), Any
@ -249,17 +311,19 @@ def field(
converter: Optional[_ConverterType] = ...,
factory: Optional[Callable[[], _T]] = ...,
kw_only: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
alias: Optional[str] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
maybe_cls: _C,
these: Optional[Dict[str, Any]] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
@ -270,21 +334,24 @@ def attrs(
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
unsafe_hash: Optional[bool] = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
maybe_cls: None = ...,
these: Optional[Dict[str, Any]] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
@ -295,20 +362,24 @@ def attrs(
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
unsafe_hash: Optional[bool] = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
maybe_cls: _C,
*,
these: Optional[Dict[str, Any]] = ...,
repr: bool = ...,
unsafe_hash: Optional[bool] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
@ -325,13 +396,16 @@ def define(
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
maybe_cls: None = ...,
*,
these: Optional[Dict[str, Any]] = ...,
repr: bool = ...,
unsafe_hash: Optional[bool] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
@ -348,22 +422,20 @@ def define(
getstate_setstate: Optional[bool] = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
match_args: bool = ...,
) -> Callable[[_C], _C]: ...
mutable = define
frozen = define # they differ only in their defaults
# TODO: add support for returning NamedTuple from the mypy plugin
class _Fields(Tuple[Attribute[Any], ...]):
def __getattr__(self, name: str) -> Attribute[Any]: ...
def fields(cls: type) -> _Fields: ...
def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
def validate(inst: Any) -> None: ...
def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
cls: _C,
globalns: Optional[Dict[str, Any]] = ...,
localns: Optional[Dict[str, Any]] = ...,
attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...
# TODO: add support for returning a proper attrs class from the mypy plugin
@ -375,7 +447,7 @@ def make_class(
bases: Tuple[type, ...] = ...,
repr_ns: Optional[str] = ...,
repr: bool = ...,
cmp: Optional[bool] = ...,
cmp: Optional[_EqOrderType] = ...,
hash: Optional[bool] = ...,
init: bool = ...,
slots: bool = ...,
@ -386,8 +458,8 @@ def make_class(
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: Optional[bool] = ...,
order: Optional[bool] = ...,
eq: Optional[_EqOrderType] = ...,
order: Optional[_EqOrderType] = ...,
collect_by_mro: bool = ...,
on_setattr: Optional[_OnSetAttrArgType] = ...,
field_transformer: Optional[_FieldTransformer] = ...,
@ -400,24 +472,28 @@ def make_class(
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
inst: Any,
inst: AttrsInstance,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
dict_factory: Type[Mapping[Any, Any]] = ...,
retain_collection_types: bool = ...,
value_serializer: Optional[Callable[[type, Attribute, Any], Any]] = ...,
value_serializer: Optional[
Callable[[type, Attribute[Any], Any], Any]
] = ...,
tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...
# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
inst: Any,
inst: AttrsInstance,
recurse: bool = ...,
filter: Optional[_FilterType[Any]] = ...,
tuple_factory: Type[Sequence[Any]] = ...,
retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

lib/attr/_cmp.py (new file, 155 lines added)

@ -0,0 +1,155 @@
# SPDX-License-Identifier: MIT
import functools
import types
from ._make import _make_ne
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
def cmp_using(
eq=None,
lt=None,
le=None,
gt=None,
ge=None,
require_same_type=True,
class_name="Comparable",
):
"""
Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
``cmp`` arguments to customize field comparison.
The resulting class will have a full set of ordering methods if
at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
:param Optional[callable] eq: `callable` used to evaluate equality
of two objects.
:param Optional[callable] lt: `callable` used to evaluate whether
one object is less than another object.
:param Optional[callable] le: `callable` used to evaluate whether
one object is less than or equal to another object.
:param Optional[callable] gt: `callable` used to evaluate whether
one object is greater than another object.
:param Optional[callable] ge: `callable` used to evaluate whether
one object is greater than or equal to another object.
:param bool require_same_type: When `True`, equality and ordering methods
will return `NotImplemented` if objects are not of the same type.
:param Optional[str] class_name: Name of class. Defaults to 'Comparable'.
See `comparison` for more details.
.. versionadded:: 21.1.0
"""
body = {
"__slots__": ["value"],
"__init__": _make_init(),
"_requirements": [],
"_is_comparable_to": _is_comparable_to,
}
# Add operations.
num_order_functions = 0
has_eq_function = False
if eq is not None:
has_eq_function = True
body["__eq__"] = _make_operator("eq", eq)
body["__ne__"] = _make_ne()
if lt is not None:
num_order_functions += 1
body["__lt__"] = _make_operator("lt", lt)
if le is not None:
num_order_functions += 1
body["__le__"] = _make_operator("le", le)
if gt is not None:
num_order_functions += 1
body["__gt__"] = _make_operator("gt", gt)
if ge is not None:
num_order_functions += 1
body["__ge__"] = _make_operator("ge", ge)
type_ = types.new_class(
class_name, (object,), {}, lambda ns: ns.update(body)
)
# Add same type requirement.
if require_same_type:
type_._requirements.append(_check_same_type)
# Add total ordering if at least one operation was defined.
if 0 < num_order_functions < 4:
if not has_eq_function:
# functools.total_ordering requires __eq__ to be defined,
# so raise an error early here to keep a nice stack trace.
raise ValueError(
"eq must be defined in order to complete ordering from "
"lt, le, gt, ge."
)
type_ = functools.total_ordering(type_)
return type_
def _make_init():
"""
Create __init__ method.
"""
def __init__(self, value):
"""
Initialize object with *value*.
"""
self.value = value
return __init__
def _make_operator(name, func):
"""
Create operator method.
"""
def method(self, other):
if not self._is_comparable_to(other):
return NotImplemented
result = func(self.value, other.value)
if result is NotImplemented:
return NotImplemented
return result
method.__name__ = f"__{name}__"
method.__doc__ = (
f"Return a {_operation_names[name]} b. Computed by attrs."
)
return method
def _is_comparable_to(self, other):
"""
Check whether `other` is comparable to `self`.
"""
for func in self._requirements:
if not func(self, other):
return False
return True
def _check_same_type(self, other):
"""
Return True if *self* and *other* are of the same type, False otherwise.
"""
return other.value.__class__ is self.value.__class__
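As a usage sketch (illustrative, not part of this diff), the new cmp_using helper builds a comparable wrapper class that attr.ib accepts for its eq/order arguments:

import attr

# Wrapper class whose __eq__/__lt__ delegate to the supplied callables;
# functools.total_ordering fills in the remaining ordering methods.
ci_str = attr.cmp_using(
    eq=lambda a, b: a.lower() == b.lower(),
    lt=lambda a, b: a.lower() < b.lower(),
    class_name="CaseInsensitiveStr",
)

@attr.s
class Tag:
    name = attr.ib(eq=ci_str, order=ci_str)

assert Tag("Foo") == Tag("foo")
assert Tag("bar") < Tag("Foo")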

lib/attr/_cmp.pyi (new file, 13 lines added)

@ -0,0 +1,13 @@
from typing import Any, Callable, Optional, Type
_CompareWithType = Callable[[Any, Any], bool]
def cmp_using(
eq: Optional[_CompareWithType] = ...,
lt: Optional[_CompareWithType] = ...,
le: Optional[_CompareWithType] = ...,
gt: Optional[_CompareWithType] = ...,
ge: Optional[_CompareWithType] = ...,
require_same_type: bool = ...,
class_name: str = ...,
) -> Type: ...

lib/attr/_compat.py

@ -1,111 +1,22 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
import inspect
import platform
import sys
import threading
import types
import warnings
from collections.abc import Mapping, Sequence # noqa
PY2 = sys.version_info[0] == 2
PYPY = platform.python_implementation() == "PyPy"
PY310 = sys.version_info[:2] >= (3, 10)
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
if PYPY or sys.version_info[:2] >= (3, 6):
ordered_dict = dict
else:
from collections import OrderedDict
ordered_dict = OrderedDict
if PY2:
from collections import Mapping, Sequence
from UserDict import IterableUserDict
# We 'bundle' isclass instead of using inspect as importing inspect is
# fairly expensive (order of 10-15 ms for a modern machine in 2016)
def isclass(klass):
return isinstance(klass, (type, types.ClassType))
# TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
TYPE = "type"
def iteritems(d):
return d.iteritems()
# Python 2 is bereft of a read-only dict proxy, so we make one!
class ReadOnlyDict(IterableUserDict):
"""
Best-effort read-only dict wrapper.
"""
def __setitem__(self, key, val):
# We gently pretend we're a Python 3 mappingproxy.
raise TypeError(
"'mappingproxy' object does not support item assignment"
)
def update(self, _):
# We gently pretend we're a Python 3 mappingproxy.
raise AttributeError(
"'mappingproxy' object has no attribute 'update'"
)
def __delitem__(self, _):
# We gently pretend we're a Python 3 mappingproxy.
raise TypeError(
"'mappingproxy' object does not support item deletion"
)
def clear(self):
# We gently pretend we're a Python 3 mappingproxy.
raise AttributeError(
"'mappingproxy' object has no attribute 'clear'"
)
def pop(self, key, default=None):
# We gently pretend we're a Python 3 mappingproxy.
raise AttributeError(
"'mappingproxy' object has no attribute 'pop'"
)
def popitem(self):
# We gently pretend we're a Python 3 mappingproxy.
raise AttributeError(
"'mappingproxy' object has no attribute 'popitem'"
)
def setdefault(self, key, default=None):
# We gently pretend we're a Python 3 mappingproxy.
raise AttributeError(
"'mappingproxy' object has no attribute 'setdefault'"
)
def __repr__(self):
# Override to be identical to the Python 3 version.
return "mappingproxy(" + repr(self.data) + ")"
def metadata_proxy(d):
res = ReadOnlyDict()
res.data.update(d) # We blocked update, so we have to do it like this.
return res
def just_warn(*args, **kw): # pragma: no cover
"""
We only warn on Python 3 because we are not aware of any concrete
consequences of not setting the cell on Python 2.
"""
else: # Python 3 and later.
from collections.abc import Mapping, Sequence # noqa
def just_warn(*args, **kw):
"""
We only warn on Python 3 because we are not aware of any concrete
consequences of not setting the cell on Python 2.
"""
def just_warn(*args, **kw):
warnings.warn(
"Running interpreter doesn't sufficiently support code object "
"introspection. Some features like bare super() or accessing "
@ -114,16 +25,45 @@ else: # Python 3 and later.
stacklevel=2,
)
def isclass(klass):
return isinstance(klass, type)
TYPE = "class"
class _AnnotationExtractor:
"""
Extract type annotations from a callable, returning None whenever there
is none.
"""
def iteritems(d):
return d.items()
__slots__ = ["sig"]
def metadata_proxy(d):
return types.MappingProxyType(dict(d))
def __init__(self, callable):
try:
self.sig = inspect.signature(callable)
except (ValueError, TypeError): # inspect failed
self.sig = None
def get_first_param_type(self):
"""
Return the type annotation of the first argument if it's not empty.
"""
if not self.sig:
return None
params = list(self.sig.parameters.values())
if params and params[0].annotation is not inspect.Parameter.empty:
return params[0].annotation
return None
def get_return_type(self):
"""
Return the return type if it's not empty.
"""
if (
self.sig
and self.sig.return_annotation is not inspect.Signature.empty
):
return self.sig.return_annotation
return None
def make_set_closure_cell():
@ -155,9 +95,6 @@ def make_set_closure_cell():
try:
# Extract the code object and make sure our assumptions about
# the closure behavior are correct.
if PY2:
co = set_first_cellvar_to.func_code
else:
co = set_first_cellvar_to.__code__
if co.co_cellvars != ("x",) or co.co_freevars != ():
raise AssertionError # pragma: no cover
@ -165,15 +102,12 @@ def make_set_closure_cell():
# Convert this code object to a code object that sets the
# function's first _freevar_ (not cellvar) to the argument.
if sys.version_info >= (3, 8):
# CPython 3.8+ has an incompatible CodeType signature
# (added a posonlyargcount argument) but also added
# CodeType.replace() to do this without counting parameters.
set_first_freevar_code = co.replace(
co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
)
def set_closure_cell(cell, value):
cell.cell_contents = value
else:
args = [co.co_argcount]
if not PY2:
args.append(co.co_kwonlyargcount)
args.extend(
[
@ -214,9 +148,6 @@ def make_set_closure_cell():
return func
if PY2:
cell = make_func_with_cell().func_closure[0]
else:
cell = make_func_with_cell().__closure__[0]
set_closure_cell(cell, 100)
if cell.cell_contents != 100:
@ -229,3 +160,17 @@ def make_set_closure_cell():
set_closure_cell = make_set_closure_cell()
# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()

lib/attr/_config.py

@ -1,4 +1,4 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
__all__ = ["set_run_validators", "get_run_validators"]
@ -9,6 +9,10 @@ _run_validators = True
def set_run_validators(run):
"""
Set whether or not validators are run. By default, they are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
instead.
"""
if not isinstance(run, bool):
raise TypeError("'run' must be bool.")
@ -19,5 +23,9 @@ def set_run_validators(run):
def get_run_validators():
"""
Return whether or not validators are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
instead.
"""
return _run_validators

lib/attr/_funcs.py

@ -1,8 +1,8 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
import copy
from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError
@ -25,7 +25,7 @@ def asdict(
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the `attr.Attribute` as the first argument and the
called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable dict_factory: A callable to produce dictionaries from. For
example, to produce ordered dictionaries instead of normal Python
@ -46,6 +46,8 @@ def asdict(
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
.. versionadded:: 20.3.0 *value_serializer*
.. versionadded:: 21.3.0 If a dict has a collection for a key, it is
serialized as a tuple.
"""
attrs = fields(inst.__class__)
rv = dict_factory()
@ -61,11 +63,11 @@ def asdict(
if has(v.__class__):
rv[a.name] = asdict(
v,
True,
filter,
dict_factory,
retain_collection_types,
value_serializer,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
@ -73,10 +75,11 @@ def asdict(
[
_asdict_anything(
i,
filter,
dict_factory,
retain_collection_types,
value_serializer,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in v
]
@ -87,20 +90,22 @@ def asdict(
(
_asdict_anything(
kk,
filter,
df,
retain_collection_types,
value_serializer,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv,
filter,
df,
retain_collection_types,
value_serializer,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in iteritems(v)
for kk, vv in v.items()
)
else:
rv[a.name] = v
@ -111,6 +116,7 @@ def asdict(
def _asdict_anything(
val,
is_key,
filter,
dict_factory,
retain_collection_types,
@ -123,22 +129,29 @@ def _asdict_anything(
# Attrs class.
rv = asdict(
val,
True,
filter,
dict_factory,
retain_collection_types,
value_serializer,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(val, (tuple, list, set, frozenset)):
cf = val.__class__ if retain_collection_types is True else list
if retain_collection_types is True:
cf = val.__class__
elif is_key:
cf = tuple
else:
cf = list
rv = cf(
[
_asdict_anything(
i,
filter,
dict_factory,
retain_collection_types,
value_serializer,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in val
]
@ -148,13 +161,23 @@ def _asdict_anything(
rv = df(
(
_asdict_anything(
kk, filter, df, retain_collection_types, value_serializer
kk,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv, filter, df, retain_collection_types, value_serializer
vv,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in iteritems(val)
for kk, vv in val.items()
)
else:
rv = val
@ -181,7 +204,7 @@ def astuple(
``attrs``-decorated.
:param callable filter: A callable whose return code determines whether an
attribute or element is included (``True``) or dropped (``False``). Is
called with the `attr.Attribute` as the first argument and the
called with the `attrs.Attribute` as the first argument and the
value as the second argument.
:param callable tuple_factory: A callable to produce tuples from. For
example, to produce lists instead of tuples.
@ -253,7 +276,7 @@ def astuple(
if has(vv.__class__)
else vv,
)
for kk, vv in iteritems(v)
for kk, vv in v.items()
)
)
else:
@ -291,7 +314,9 @@ def assoc(inst, **changes):
class.
.. deprecated:: 17.1.0
Use `evolve` instead.
Use `attrs.evolve` instead if you can.
This function will not be removed due to the slightly different approach
compared to `attrs.evolve`.
"""
import warnings
@ -302,13 +327,11 @@ def assoc(inst, **changes):
)
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in iteritems(changes):
for k, v in changes.items():
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
"{k} is not an attrs attribute on {cl}.".format(
k=k, cl=new.__class__
)
f"{k} is not an attrs attribute on {new.__class__}."
)
_obj_setattr(new, k, v)
return new
@ -336,14 +359,14 @@ def evolve(inst, **changes):
if not a.init:
continue
attr_name = a.name # To deal with private attributes.
init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
init_name = a.alias
if init_name not in changes:
changes[init_name] = getattr(inst, attr_name)
return cls(**changes)
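The switch from stripping leading underscores to using a.alias means evolve() now honours whatever init-argument name an attribute declares. A small sketch (illustrative class, not part of this diff):

import attr

@attr.s
class Server:
    _host = attr.ib()            # init argument is exposed as "host"
    port = attr.ib(default=80)

s = attr.evolve(Server(host="a.example"), port=8080)
# evolve() rebuilds the init call from each attribute's alias, so the
# private attribute round-trips without manual name mangling.
assert (s._host, s.port) == ("a.example", 8080)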
def resolve_types(cls, globalns=None, localns=None):
def resolve_types(cls, globalns=None, localns=None, attribs=None):
"""
Resolve any strings and forward annotations in type annotations.
@ -360,31 +383,36 @@ def resolve_types(cls, globalns=None, localns=None):
:param type cls: Class to resolve.
:param Optional[dict] globalns: Dictionary containing global variables.
:param Optional[dict] localns: Dictionary containing local variables.
:param Optional[list] attribs: List of attribs for the given class.
This is necessary when calling from inside a ``field_transformer``
since *cls* is not an ``attrs`` class yet.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
class and you didn't pass any attribs.
:raise NameError: If types cannot be resolved because of missing variables.
:returns: *cls* so you can use this function also as a class decorator.
Please note that you have to apply it **after** `attr.s`. That means
the decorator has to come in the line **before** `attr.s`.
Please note that you have to apply it **after** `attrs.define`. That
means the decorator has to come in the line **before** `attrs.define`.
.. versionadded:: 20.1.0
.. versionadded:: 21.1.0 *attribs*
"""
try:
# Since calling get_type_hints is expensive we cache whether we've
# done it already.
cls.__attrs_types_resolved__
except AttributeError:
if getattr(cls, "__attrs_types_resolved__", None) != cls:
import typing
hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
for field in fields(cls):
for field in fields(cls) if attribs is None else attribs:
if field.name in hints:
# Since fields have been frozen we must work around it.
_obj_setattr(field, "type", hints[field.name])
cls.__attrs_types_resolved__ = True
# We store the class we resolved so that subclasses know they haven't
# been resolved.
cls.__attrs_types_resolved__ = cls
# Return the class so you can use it as a decorator too.
return cls
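For reference, a minimal sketch (illustrative, not part of this diff) of resolve_types() resolving a string/forward annotation once the class exists:

import typing
import attr

@attr.s(auto_attribs=True)
class Node:
    children: "typing.List[Node]" = attr.Factory(list)

# Resolve the string annotation after the class (and the name ``Node``) exist;
# the result is cached on the class via __attrs_types_resolved__.
attr.resolve_types(Node)
assert attr.fields(Node).children.type == typing.List[Node]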

File diff suppressed because it is too large.

lib/attr/_next_gen.py

@ -1,16 +1,24 @@
"""
This is a Python 3.6 and later-only, keyword-only, and **provisional** API that
calls `attr.s` with different default values.
# SPDX-License-Identifier: MIT
Provisional APIs that shall become "import attrs" one glorious day.
"""
These are keyword-only APIs that call `attr.s` and `attr.ib` with different
default values.
"""
from functools import partial
from attr.exceptions import UnannotatedAttributeError
from . import setters
from ._make import NOTHING, _frozen_setattrs, attrib, attrs
from ._funcs import asdict as _asdict
from ._funcs import astuple as _astuple
from ._make import (
NOTHING,
_frozen_setattrs,
_ng_default_on_setattr,
attrib,
attrs,
)
from .exceptions import UnannotatedAttributeError
def define(
@ -18,6 +26,7 @@ def define(
*,
these=None,
repr=None,
unsafe_hash=None,
hash=None,
init=None,
slots=True,
@ -34,22 +43,47 @@ def define(
getstate_setstate=None,
on_setattr=None,
field_transformer=None,
match_args=True,
):
r"""
The only behavioral differences are the handling of the *auto_attribs*
option:
Define an ``attrs`` class.
Differences to the classic `attr.s` that it uses underneath:
- Automatically detect whether or not *auto_attribs* should be `True` (c.f.
*auto_attribs* parameter).
- If *frozen* is `False`, run converters and validators when setting an
attribute by default.
- *slots=True*
.. caution::
Usually this has only upsides and few visible effects in everyday
programming. But it *can* lead to some surprising behaviors, so please
make sure to read :term:`slotted classes`.
- *auto_exc=True*
- *auto_detect=True*
- *order=False*
- Some options that were only relevant on Python 2 or were kept around for
backwards-compatibility have been removed.
Please note that these are all defaults and you can change them as you
wish.
:param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
exactly like `attr.s`. If left `None`, `attr.s` will try to guess:
1. If all attributes are annotated and no `attr.ib` is found, it assumes
*auto_attribs=True*.
1. If any attributes are annotated and no unannotated `attrs.fields`\ s
are found, it assumes *auto_attribs=True*.
2. Otherwise it assumes *auto_attribs=False* and tries to collect
`attr.ib`\ s.
`attrs.fields`\ s.
and that mutable classes (``frozen=False``) validate on ``__setattr__``.
For now, please refer to `attr.s` for the rest of the parameters.
.. versionadded:: 20.1.0
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
.. versionadded:: 22.2.0
*unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
"""
def do_it(cls, auto_attribs):
@ -58,6 +92,7 @@ def define(
these=these,
repr=repr,
hash=hash,
unsafe_hash=unsafe_hash,
init=init,
slots=slots,
frozen=frozen,
@ -74,6 +109,7 @@ def define(
getstate_setstate=getstate_setstate,
on_setattr=on_setattr,
field_transformer=field_transformer,
match_args=match_args,
)
def wrap(cls):
@ -86,9 +122,9 @@ def define(
had_on_setattr = on_setattr not in (None, setters.NO_OP)
# By default, mutable classes validate on setattr.
# By default, mutable classes convert & validate on setattr.
if frozen is False and on_setattr is None:
on_setattr = setters.validate
on_setattr = _ng_default_on_setattr
# However, if we subclass a frozen class, we inherit the immutability
# and disable on_setattr.
@ -137,6 +173,7 @@ def field(
eq=None,
order=None,
on_setattr=None,
alias=None,
):
"""
Identical to `attr.ib`, except keyword-only and with some arguments
@ -157,4 +194,33 @@ def field(
eq=eq,
order=order,
on_setattr=on_setattr,
alias=alias,
)
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
"""
Same as `attr.asdict`, except that collections types are always retained
and dict is always used as *dict_factory*.
.. versionadded:: 21.3.0
"""
return _asdict(
inst=inst,
recurse=recurse,
filter=filter,
value_serializer=value_serializer,
retain_collection_types=True,
)
def astuple(inst, *, recurse=True, filter=None):
"""
Same as `attr.astuple`, except that collections types are always retained
and `tuple` is always used as the *tuple_factory*.
.. versionadded:: 21.3.0
"""
return _astuple(
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
)
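A short sketch of the next-generation API this file provides, including the *alias* parameter added in 22.2.0 (class and field names are illustrative, not part of this diff):

import attr
from attr import define, field

@define                        # slots=True, auto_attribs detected, auto_exc, order=False
class Download:
    url: str
    _retries: int = field(default=3, alias="retries")  # explicit init-argument name

d = Download("http://example.invalid/f.iso", retries=5)
d._retries = 2                 # mutable class: converters/validators also run on setattr
assert attr.asdict(d) == {"url": "http://example.invalid/f.iso", "_retries": 2}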

lib/attr/_typing_compat.pyi (new file, 15 lines added)

@ -0,0 +1,15 @@
from typing import Any, ClassVar, Protocol
# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
MYPY = False
if MYPY:
# A protocol to be able to statically accept an attrs class.
class AttrsInstance_(Protocol):
__attrs_attrs__: ClassVar[Any]
else:
# For type checkers without plug-in support use an empty protocol that
# will (hopefully) be combined into a union.
class AttrsInstance_(Protocol):
pass

lib/attr/_version_info.py

@ -1,4 +1,5 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
from functools import total_ordering
@ -8,7 +9,7 @@ from ._make import attrib, attrs
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
class VersionInfo:
"""
A version object that can be compared to tuple of length 1--4:

lib/attr/converters.py

@ -1,16 +1,21 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful converters.
"""
from __future__ import absolute_import, division, print_function
import typing
from ._compat import _AnnotationExtractor
from ._make import NOTHING, Factory, pipe
__all__ = [
"pipe",
"optional",
"default_if_none",
"optional",
"pipe",
"to_bool",
]
@ -19,6 +24,9 @@ def optional(converter):
A converter that allows an attribute to be optional. An optional attribute
is one which can be set to ``None``.
Type annotations will be inferred from the wrapped converter's, if it
has any.
:param callable converter: the converter that is used for non-``None``
values.
@ -30,6 +38,16 @@ def optional(converter):
return None
return converter(val)
xtr = _AnnotationExtractor(converter)
t = xtr.get_first_param_type()
if t:
optional_converter.__annotations__["val"] = typing.Optional[t]
rt = xtr.get_return_type()
if rt:
optional_converter.__annotations__["return"] = typing.Optional[rt]
return optional_converter
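Sketch of the annotation propagation this hunk adds: the wrapped converter's hints reappear as Optionals on the generated converter (illustrative, not part of this diff):

from attr import converters

def to_int(val: str) -> int:
    return int(val)

opt = converters.optional(to_int)
# {'val': typing.Optional[str], 'return': typing.Optional[int]}
print(opt.__annotations__)
assert opt("7") == 7 and opt(None) is None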
@ -39,14 +57,14 @@ def default_if_none(default=NOTHING, factory=None):
result of *factory*.
:param default: Value to be used if ``None`` is passed. Passing an instance
of `attr.Factory` is supported, however the ``takes_self`` option
of `attrs.Factory` is supported, however the ``takes_self`` option
is *not*.
:param callable factory: A callable that takes not parameters whose result
:param callable factory: A callable that takes no parameters whose result
is used if ``None`` is passed.
:raises TypeError: If **neither** *default* or *factory* is passed.
:raises TypeError: If **both** *default* and *factory* are passed.
:raises ValueError: If an instance of `attr.Factory` is passed with
:raises ValueError: If an instance of `attrs.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
@ -83,3 +101,44 @@ def default_if_none(default=NOTHING, factory=None):
return default
return default_if_none_converter
def to_bool(val):
"""
Convert "boolean" strings (e.g., from env. vars.) to real booleans.
Values mapping to :code:`True`:
- :code:`True`
- :code:`"true"` / :code:`"t"`
- :code:`"yes"` / :code:`"y"`
- :code:`"on"`
- :code:`"1"`
- :code:`1`
Values mapping to :code:`False`:
- :code:`False`
- :code:`"false"` / :code:`"f"`
- :code:`"no"` / :code:`"n"`
- :code:`"off"`
- :code:`"0"`
- :code:`0`
:raises ValueError: for any other value.
.. versionadded:: 21.3.0
"""
if isinstance(val, str):
val = val.lower()
truthy = {True, "true", "t", "yes", "y", "on", "1", 1}
falsy = {False, "false", "f", "no", "n", "off", "0", 0}
try:
if val in truthy:
return True
if val in falsy:
return False
except TypeError:
# Raised when "val" is not hashable (e.g., lists)
pass
raise ValueError(f"Cannot convert value to bool: {val}")

lib/attr/converters.pyi

@ -1,4 +1,5 @@
from typing import TypeVar, Optional, Callable, overload
from typing import Callable, TypeVar, overload
from . import _ConverterType
_T = TypeVar("_T")
@ -9,3 +10,4 @@ def optional(converter: _ConverterType) -> _ConverterType: ...
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...

lib/attr/exceptions.py

@ -1,9 +1,9 @@
from __future__ import absolute_import, division, print_function
# SPDX-License-Identifier: MIT
class FrozenError(AttributeError):
"""
A frozen/immutable instance or attribute haave been attempted to be
A frozen/immutable instance or attribute have been attempted to be
modified.
It mirrors the behavior of ``namedtuples`` by using the same error message

lib/attr/filters.py

@ -1,10 +1,9 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful filters for `attr.asdict`.
"""
from __future__ import absolute_import, division, print_function
from ._compat import isclass
from ._make import Attribute
@ -13,17 +12,17 @@ def _split_what(what):
Returns a tuple of `frozenset`s of classes and attributes.
"""
return (
frozenset(cls for cls in what if isclass(cls)),
frozenset(cls for cls in what if isinstance(cls, type)),
frozenset(cls for cls in what if isinstance(cls, Attribute)),
)
def include(*what):
"""
Whitelist *what*.
Include *what*.
:param what: What to whitelist.
:type what: `list` of `type` or `attr.Attribute`\\ s
:param what: What to include.
:type what: `list` of `type` or `attrs.Attribute`\\ s
:rtype: `callable`
"""
@ -37,10 +36,10 @@ def include(*what):
def exclude(*what):
"""
Blacklist *what*.
Exclude *what*.
:param what: What to blacklist.
:type what: `list` of classes or `attr.Attribute`\\ s.
:param what: What to exclude.
:type what: `list` of classes or `attrs.Attribute`\\ s.
:rtype: `callable`
"""

lib/attr/filters.pyi

@ -1,4 +1,5 @@
from typing import Union, Any
from typing import Any, Union
from . import Attribute, _FilterType
def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...

lib/attr/setters.py

@ -1,8 +1,9 @@
# SPDX-License-Identifier: MIT
"""
Commonly used hooks for on_setattr.
"""
from __future__ import absolute_import, division, print_function
from . import _config
from .exceptions import FrozenAttributeError
@ -67,11 +68,6 @@ def convert(instance, attrib, new_value):
return new_value
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# autodata stopped working, so the docstring is inlined in the API docs.
NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
Does not work in `pipe` or within lists.
.. versionadded:: 20.1.0
"""

lib/attr/setters.pyi

@ -1,10 +1,11 @@
from . import _OnSetAttrType, Attribute
from typing import TypeVar, Any, NewType, NoReturn, cast
from typing import Any, NewType, NoReturn, TypeVar
from . import Attribute, _OnSetAttrType
_T = TypeVar("_T")
def frozen(
instance: Any, attribute: Attribute, new_value: Any
instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

lib/attr/validators.py

@ -1,30 +1,100 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful validators.
"""
from __future__ import absolute_import, division, print_function
import operator
import re
from contextlib import contextmanager
from ._config import get_run_validators, set_run_validators
from ._make import _AndValidator, and_, attrib, attrs
from .converters import default_if_none
from .exceptions import NotCallableError
try:
Pattern = re.Pattern
except AttributeError: # Python <3.7 lacks a Pattern type.
Pattern = type(re.compile(""))
__all__ = [
"and_",
"deep_iterable",
"deep_mapping",
"disabled",
"ge",
"get_disabled",
"gt",
"in_",
"instance_of",
"is_callable",
"le",
"lt",
"matches_re",
"max_len",
"min_len",
"not_",
"optional",
"provides",
"set_disabled",
]
def set_disabled(disabled):
"""
Globally disable or enable running validators.
By default, they are run.
:param disabled: If ``True``, disable running all validators.
:type disabled: bool
.. warning::
This function is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(not disabled)
def get_disabled():
"""
Return a bool indicating whether validators are currently disabled or not.
:return: ``True`` if validators are currently disabled.
:rtype: bool
.. versionadded:: 21.3.0
"""
return not get_run_validators()
@contextmanager
def disabled():
"""
Context manager that disables running validators within its context.
.. warning::
This context manager is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(False)
try:
yield
finally:
set_run_validators(True)
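Sketch of the new toggling helpers layered over the old _config functions (illustrative class, not part of this diff):

import attr
from attr import validators

@attr.s
class Point:
    x = attr.ib(validator=validators.instance_of(int))

with validators.disabled():      # temporarily skip all validators (not thread-safe)
    Point("not an int")          # would normally raise TypeError

assert validators.get_disabled() is False   # re-enabled when the block exits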
@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator(object):
class _InstanceOfValidator:
type = attrib()
def __call__(self, inst, attr, value):
@ -58,19 +128,18 @@ def instance_of(type):
`isinstance` therefore it's also valid to pass a tuple of types).
:param type: The type to check for.
:type type: type or tuple of types
:type type: type or tuple of type
:raises TypeError: With a human readable error message, the attribute
(of type `attr.Attribute`), the expected type, and the value it
(of type `attrs.Attribute`), the expected type, and the value it
got.
"""
return _InstanceOfValidator(type)
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator(object):
regex = attrib()
flags = attrib()
class _MatchesReValidator:
pattern = attrib()
match_func = attrib()
def __call__(self, inst, attr, value):
@ -79,18 +148,18 @@ class _MatchesReValidator(object):
"""
if not self.match_func(value):
raise ValueError(
"'{name}' must match regex {regex!r}"
"'{name}' must match regex {pattern!r}"
" ({value!r} doesn't)".format(
name=attr.name, regex=self.regex.pattern, value=value
name=attr.name, pattern=self.pattern.pattern, value=value
),
attr,
self.regex,
self.pattern,
value,
)
def __repr__(self):
return "<matches_re validator for pattern {regex!r}>".format(
regex=self.regex
return "<matches_re validator for pattern {pattern!r}>".format(
pattern=self.pattern
)
@ -99,48 +168,51 @@ def matches_re(regex, flags=0, func=None):
A validator that raises `ValueError` if the initializer is called
with a string that doesn't match *regex*.
:param str regex: a regex string to match against
:param regex: a regex string or precompiled pattern to match against
:param int flags: flags that will be passed to the underlying re function
(default 0)
:param callable func: which underlying `re` function to call (options
are `re.fullmatch`, `re.search`, `re.match`, default
is ``None`` which means either `re.fullmatch` or an emulation of
it on Python 2). For performance reasons, they won't be used directly
but on a pre-`re.compile`\ ed pattern.
:param callable func: which underlying `re` function to call. Valid options
are `re.fullmatch`, `re.search`, and `re.match`; the default ``None``
means `re.fullmatch`. For performance reasons, the pattern is always
precompiled using `re.compile`.
.. versionadded:: 19.2.0
.. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
"""
fullmatch = getattr(re, "fullmatch", None)
valid_funcs = (fullmatch, None, re.search, re.match)
valid_funcs = (re.fullmatch, None, re.search, re.match)
if func not in valid_funcs:
raise ValueError(
"'func' must be one of %s."
% (
"'func' must be one of {}.".format(
", ".join(
sorted(
e and e.__name__ or "None" for e in set(valid_funcs)
)
),
)
)
)
if isinstance(regex, Pattern):
if flags:
raise TypeError(
"'flags' can only be used with a string pattern; "
"pass flags to re.compile() instead"
)
pattern = regex
else:
pattern = re.compile(regex, flags)
if func is re.match:
match_func = pattern.match
elif func is re.search:
match_func = pattern.search
else:
if fullmatch:
match_func = pattern.fullmatch
else:
pattern = re.compile(r"(?:{})\Z".format(regex), flags)
match_func = pattern.match
return _MatchesReValidator(pattern, flags, match_func)
return _MatchesReValidator(pattern, match_func)
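Sketch of the precompiled-pattern support added here (illustrative, not part of this diff; note flags must go to re.compile when a compiled pattern is passed):

import re
import attr
from attr import validators

HEX = re.compile(r"[0-9a-f]+", re.IGNORECASE)

@attr.s
class Commit:
    sha = attr.ib(validator=validators.matches_re(HEX))    # precompiled pattern
    tag = attr.ib(validator=validators.matches_re(r"v\d+", func=re.match), default="v1")

Commit("765BC96")                     # IGNORECASE comes from the compiled pattern
# validators.matches_re(HEX, flags=re.I) would raise TypeError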
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator(object):
class _ProvidesValidator:
interface = attrib()
def __call__(self, inst, attr, value):
@ -175,14 +247,14 @@ def provides(interface):
:type interface: ``zope.interface.Interface``
:raises TypeError: With a human readable error message, the attribute
(of type `attr.Attribute`), the expected interface, and the
(of type `attrs.Attribute`), the expected interface, and the
value it got.
"""
return _ProvidesValidator(interface)
@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator(object):
class _OptionalValidator:
validator = attrib()
def __call__(self, inst, attr, value):
@ -216,7 +288,7 @@ def optional(validator):
@attrs(repr=False, slots=True, hash=True)
class _InValidator(object):
class _InValidator:
options = attrib()
def __call__(self, inst, attr, value):
@ -229,7 +301,10 @@ class _InValidator(object):
raise ValueError(
"'{name}' must be in {options!r} (got {value!r})".format(
name=attr.name, options=self.options, value=value
)
),
attr,
self.options,
value,
)
def __repr__(self):
@ -248,16 +323,20 @@ def in_(options):
:type options: list, tuple, `enum.Enum`, ...
:raises ValueError: With a human readable error message, the attribute (of
type `attr.Attribute`), the expected options, and the value it
type `attrs.Attribute`), the expected options, and the value it
got.
.. versionadded:: 17.1.0
.. versionchanged:: 22.1.0
The ValueError was incomplete until now and only contained the human
readable error message. Now it contains all the information that has
been promised since 17.1.0.
"""
return _InValidator(options)
@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator(object):
class _IsCallableValidator:
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
@ -287,14 +366,14 @@ def is_callable():
.. versionadded:: 19.1.0
:raises `attr.exceptions.NotCallableError`: With a human readable error
message containing the attribute (`attr.Attribute`) name,
message containing the attribute (`attrs.Attribute`) name,
and the value it got.
"""
return _IsCallableValidator()
@attrs(repr=False, slots=True, hash=True)
class _DeepIterable(object):
class _DeepIterable:
member_validator = attrib(validator=is_callable())
iterable_validator = attrib(
default=None, validator=optional(is_callable())
@ -314,7 +393,7 @@ class _DeepIterable(object):
iterable_identifier = (
""
if self.iterable_validator is None
else " {iterable!r}".format(iterable=self.iterable_validator)
else f" {self.iterable_validator!r}"
)
return (
"<deep_iterable validator for{iterable_identifier}"
@ -329,7 +408,7 @@ def deep_iterable(member_validator, iterable_validator=None):
"""
A validator that performs deep validation of an iterable.
:param member_validator: Validator to apply to iterable members
:param member_validator: Validator(s) to apply to iterable members
:param iterable_validator: Validator to apply to iterable itself
(optional)
@ -337,11 +416,13 @@ def deep_iterable(member_validator, iterable_validator=None):
:raises TypeError: if any sub-validators fail
"""
if isinstance(member_validator, (list, tuple)):
member_validator = and_(*member_validator)
return _DeepIterable(member_validator, iterable_validator)
@attrs(repr=False, slots=True, hash=True)
class _DeepMapping(object):
class _DeepMapping:
key_validator = attrib(validator=is_callable())
value_validator = attrib(validator=is_callable())
mapping_validator = attrib(default=None, validator=optional(is_callable()))
@ -377,3 +458,257 @@ def deep_mapping(key_validator, value_validator, mapping_validator=None):
:raises TypeError: if any sub-validators fail
"""
return _DeepMapping(key_validator, value_validator, mapping_validator)
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
bound = attrib()
compare_op = attrib()
compare_func = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.compare_func(value, self.bound):
raise ValueError(
"'{name}' must be {op} {bound}: {value}".format(
name=attr.name,
op=self.compare_op,
bound=self.bound,
value=value,
)
)
def __repr__(self):
return "<Validator for x {op} {bound}>".format(
op=self.compare_op, bound=self.bound
)
def lt(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number larger or equal to *val*.
:param val: Exclusive upper bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<", operator.lt)
def le(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number greater than *val*.
:param val: Inclusive upper bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<=", operator.le)
def ge(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number smaller than *val*.
:param val: Inclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">=", operator.ge)
def gt(val):
"""
A validator that raises `ValueError` if the initializer is called
with a number smaller or equal to *val*.
:param val: Exclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">", operator.gt)
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
max_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) > self.max_length:
raise ValueError(
"Length of '{name}' must be <= {max}: {len}".format(
name=attr.name, max=self.max_length, len=len(value)
)
)
def __repr__(self):
return f"<max_len validator for {self.max_length}>"
def max_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is longer than *length*.
:param int length: Maximum length of the string or iterable
.. versionadded:: 21.3.0
"""
return _MaxLengthValidator(length)
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
min_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) < self.min_length:
raise ValueError(
"Length of '{name}' must be => {min}: {len}".format(
name=attr.name, min=self.min_length, len=len(value)
)
)
def __repr__(self):
return f"<min_len validator for {self.min_length}>"
def min_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is shorter than *length*.
:param int length: Minimum length of the string or iterable
.. versionadded:: 22.1.0
"""
return _MinLengthValidator(length)
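Combined sketch of the new bound and length validators (illustrative class, not part of this diff):

import attr
from attr import validators as v

@attr.s
class Torrent:
    seeds = attr.ib(validator=v.ge(0))                  # inclusive lower bound
    ratio = attr.ib(validator=[v.gt(0.0), v.lt(10.0)])  # a list is combined via and_()
    name = attr.ib(validator=[v.min_len(1), v.max_len(255)])

Torrent(seeds=4, ratio=1.5, name="ubuntu.iso")
# Torrent(seeds=-1, ratio=1.5, name="x") raises ValueError: 'seeds' must be >= 0: -1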
@attrs(repr=False, slots=True, hash=True)
class _SubclassOfValidator:
type = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not issubclass(value, self.type):
raise TypeError(
"'{name}' must be a subclass of {type!r} "
"(got {value!r}).".format(
name=attr.name,
type=self.type,
value=value,
),
attr,
self.type,
value,
)
def __repr__(self):
return "<subclass_of validator for type {type!r}>".format(
type=self.type
)
def _subclass_of(type):
"""
A validator that raises a `TypeError` if the initializer is called
with a wrong type for this particular attribute (checks are performed using
`issubclass` therefore it's also valid to pass a tuple of types).
:param type: The type to check for.
:type type: type or tuple of types
:raises TypeError: With a human readable error message, the attribute
(of type `attrs.Attribute`), the expected type, and the value it
got.
"""
return _SubclassOfValidator(type)
@attrs(repr=False, slots=True, hash=True)
class _NotValidator:
validator = attrib()
msg = attrib(
converter=default_if_none(
"not_ validator child '{validator!r}' "
"did not raise a captured error"
)
)
exc_types = attrib(
validator=deep_iterable(
member_validator=_subclass_of(Exception),
iterable_validator=instance_of(tuple),
),
)
def __call__(self, inst, attr, value):
try:
self.validator(inst, attr, value)
except self.exc_types:
pass # suppress error to invert validity
else:
raise ValueError(
self.msg.format(
validator=self.validator,
exc_types=self.exc_types,
),
attr,
self.validator,
value,
self.exc_types,
)
def __repr__(self):
return (
"<not_ validator wrapping {what!r}, " "capturing {exc_types!r}>"
).format(
what=self.validator,
exc_types=self.exc_types,
)
def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
"""
A validator that wraps and logically 'inverts' the validator passed to it.
It will raise a `ValueError` if the provided validator *doesn't* raise a
`ValueError` or `TypeError` (by default), and will suppress the exception
if the provided validator *does*.
Intended to be used with existing validators to compose logic without
needing to create inverted variants, for example, ``not_(in_(...))``.
:param validator: A validator to be logically inverted.
:param msg: Message to raise if validator fails.
Formatted with keys ``exc_types`` and ``validator``.
:type msg: str
:param exc_types: Exception type(s) to capture.
Other types raised by child validators will not be intercepted and
pass through.
:raises ValueError: With a human readable error message,
the attribute (of type `attrs.Attribute`),
the validator that failed to raise an exception,
the value it got,
and the expected exception types.
.. versionadded:: 22.2.0
"""
try:
exc_types = tuple(exc_types)
except TypeError:
exc_types = (exc_types,)
return _NotValidator(validator, msg, exc_types)
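Sketch of not_ composing with an existing validator (illustrative, not part of this diff):

import attr
from attr import validators as v

RESERVED = {"admin", "root"}

@attr.s
class Account:
    # in_() accepts only members of RESERVED; not_ inverts it, so reserved
    # names are rejected and everything else passes.
    name = attr.ib(validator=v.not_(v.in_(RESERVED)))

Account("jack")        # passes
# Account("root") raises ValueError (the default "did not raise" message)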

lib/attr/validators.pyi

@ -1,20 +1,24 @@
from typing import (
Container,
List,
Union,
TypeVar,
Type,
Any,
Optional,
Tuple,
Iterable,
Mapping,
Callable,
Match,
AnyStr,
Callable,
Container,
ContextManager,
Iterable,
List,
Mapping,
Match,
Optional,
Pattern,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from . import _ValidatorType
from . import _ValidatorArgType
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
@ -25,6 +29,10 @@ _K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...
# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
@ -48,14 +56,14 @@ def optional(
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
regex: AnyStr,
regex: Union[Pattern[AnyStr], AnyStr],
flags: int = ...,
func: Optional[
Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
] = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
member_validator: _ValidatorType[_T],
member_validator: _ValidatorArgType[_T],
iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
@ -64,3 +72,15 @@ def deep_mapping(
mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
def not_(
validator: _ValidatorType[_T],
*,
msg: Optional[str] = None,
exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ...
) -> _ValidatorType[_T]: ...