Merge branch 'feature/UpdateAttr' into dev

JackDandy 2023-10-08 00:21:24 +01:00
commit d69acc3294
14 changed files with 319 additions and 341 deletions

View file

@ -1,5 +1,6 @@
### 3.31.0 (2023-1x-xx xx:xx:00 UTC)
* Update attr 22.2.0 (683d056) to 23.1.0 (67e4ff2)
* Update Beautiful Soup 4.12.2 to 4.12.2 (30c58a1)
* Update soupsieve 2.4.1 (2e66beb) to 2.5.0 (dc71495)
* Update hachoir 3.1.2 (f739b43) to 3.2.0 (38d759f)

View file

@ -9,6 +9,7 @@ from typing import Callable
from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._compat import Protocol
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
@ -31,7 +32,7 @@ ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
class AttrsInstance:
class AttrsInstance(Protocol):
pass
@ -90,8 +91,9 @@ def _make_getattr(mod_name: str) -> Callable:
"__email__": "",
"__license__": "license",
}
if name not in dunder_to_metadata.keys():
raise AttributeError(f"module {mod_name} has no attribute {name}")
if name not in dunder_to_metadata:
msg = f"module {mod_name} has no attribute {name}"
raise AttributeError(msg)
import sys
import warnings
@ -101,7 +103,7 @@ def _make_getattr(mod_name: str) -> Callable:
else:
from importlib.metadata import metadata
if name != "__version_info__":
if name not in ("__version__", "__version_info__"):
warnings.warn(
f"Accessing {mod_name}.{name} is deprecated and will be "
"removed in a future release. Use importlib.metadata directly "
@ -113,15 +115,15 @@ def _make_getattr(mod_name: str) -> Callable:
meta = metadata("attrs")
if name == "__license__":
return "MIT"
elif name == "__copyright__":
if name == "__copyright__":
return "Copyright (c) 2015 Hynek Schlawack"
elif name in ("__uri__", "__url__"):
if name in ("__uri__", "__url__"):
return meta["Project-URL"].split(" ", 1)[-1]
elif name == "__version_info__":
if name == "__version_info__":
return VersionInfo._from_version_string(meta["version"])
elif name == "__author__":
if name == "__author__":
return meta["Author-email"].rsplit(" ", 1)[0]
elif name == "__email__":
if name == "__email__":
return meta["Author-email"].rsplit("<", 1)[1][:-1]
return meta[dunder_to_metadata[name]]
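
The lazy `__getattr__` in `attr/__init__.py` now exempts both `__version__` and `__version_info__` from the deprecation warning and resolves the remaining dunders from package metadata. A minimal sketch (not part of the diff) of the importlib.metadata lookup that the warning points users to:

# Minimal sketch of the importlib.metadata lookup the deprecation warning
# recommends over attr.__author__ / attr.__email__ and friends.
from importlib.metadata import metadata, version

meta = metadata("attrs")                       # email.Message-like mapping
print(version("attrs"))                        # e.g. "23.1.0"
print(meta["Author-email"].rsplit(" ", 1)[0])  # author name, split as above
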

View file

@ -33,6 +33,11 @@ if sys.version_info >= (3, 10):
else:
from typing_extensions import TypeGuard
if sys.version_info >= (3, 11):
from typing import dataclass_transform
else:
from typing_extensions import dataclass_transform
__version__: str
__version_info__: VersionInfo
__title__: str
@ -69,8 +74,7 @@ _ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
class AttrsInstance(AttrsInstance_, Protocol):
pass
_A = TypeVar("_A", bound=AttrsInstance)
# _make --
_A = TypeVar("_A", bound=type[AttrsInstance])
class _Nothing(enum.Enum):
NOTHING = enum.auto()
@ -104,23 +108,6 @@ else:
takes_self: bool = ...,
) -> _T: ...
# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
*,
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
frozen_default: bool = False,
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...
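
The stub drops the provisional `__dataclass_transform__` spelling described in the removed comment and instead decorates the overloads with `dataclass_transform` (from `typing` on 3.11+, else `typing_extensions`), renaming `field_descriptors` to `field_specifiers`. A minimal sketch, with a hypothetical wrapper `my_define`, of how an extension would now declare the same type-checker support:

# Minimal sketch with a hypothetical wrapper; dataclass_transform replaces the
# old __dataclass_transform__ construct and takes field_specifiers.
import sys

if sys.version_info >= (3, 11):
    from typing import dataclass_transform
else:
    from typing_extensions import dataclass_transform

import attr

@dataclass_transform(field_specifiers=(attr.attrib, attr.field))
def my_define(cls):
    # behaves like attrs.define as far as type checkers are concerned
    return attr.define(cls)
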
class Attribute(Generic[_T]):
name: str
default: Optional[_T]
@ -323,7 +310,7 @@ def field(
type: Optional[type] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
maybe_cls: _C,
these: Optional[Dict[str, Any]] = ...,
@ -351,7 +338,7 @@ def attrs(
unsafe_hash: Optional[bool] = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
maybe_cls: None = ...,
these: Optional[Dict[str, Any]] = ...,
@ -379,7 +366,7 @@ def attrs(
unsafe_hash: Optional[bool] = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
@dataclass_transform(field_specifiers=(attrib, field))
def define(
maybe_cls: _C,
*,
@ -405,7 +392,7 @@ def define(
match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
@dataclass_transform(field_specifiers=(attrib, field))
def define(
maybe_cls: None = ...,
*,
@ -434,9 +421,7 @@ def define(
mutable = define
@overload
@__dataclass_transform__(
frozen_default=True, field_descriptors=(attrib, field)
)
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
def frozen(
maybe_cls: _C,
*,
@ -462,9 +447,7 @@ def frozen(
match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(
frozen_default=True, field_descriptors=(attrib, field)
)
@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
def frozen(
maybe_cls: None = ...,
*,

View file

@ -92,10 +92,8 @@ def cmp_using(
if not has_eq_function:
# functools.total_ordering requires __eq__ to be defined,
# so raise early error here to keep a nice stack.
raise ValueError(
"eq must be define is order to complete ordering from "
"lt, le, gt, ge."
)
msg = "eq must be define is order to complete ordering from lt, le, gt, ge."
raise ValueError(msg)
type_ = functools.total_ordering(type_)
return type_
@ -142,10 +140,7 @@ def _is_comparable_to(self, other):
"""
Check whether `other` is comparable to `self`.
"""
for func in self._requirements:
if not func(self, other):
return False
return True
return all(func(self, other) for func in self._requirements)
def _check_same_type(self, other):
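
Both `_cmp.py` changes are behaviour-preserving: the multi-line ValueError is collapsed into a pre-built `msg`, and `_is_comparable_to` becomes a single `all()` expression. A minimal usage sketch of `cmp_using`, whose ordering helpers are what that ValueError guards (lt/le/gt/ge without eq):

import attr

# cmp_using builds a comparator class for field-level comparison; passing
# lt/le/gt/ge without eq would raise the ValueError shown above.
ci_str = attr.cmp_using(
    eq=lambda a, b: a.lower() == b.lower(),
    lt=lambda a, b: a.lower() < b.lower(),
)

@attr.s
class Tag:
    name = attr.ib(eq=ci_str, order=ci_str)

assert Tag("Foo") == Tag("foo")
assert Tag("apple") < Tag("Banana")
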

View file

@ -1,6 +1,5 @@
# SPDX-License-Identifier: MIT
import inspect
import platform
import sys
@ -8,7 +7,7 @@ import threading
import types
import warnings
from collections.abc import Mapping, Sequence # noqa
from collections.abc import Mapping, Sequence # noqa: F401
from typing import _GenericAlias
@ -18,6 +17,15 @@ PY310 = sys.version_info[:2] >= (3, 10)
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
if sys.version_info < (3, 8):
try:
from typing_extensions import Protocol
except ImportError: # pragma: no cover
Protocol = object
else:
from typing import Protocol # noqa: F401
def just_warn(*args, **kw):
warnings.warn(
"Running interpreter doesn't sufficiently support code object "
@ -155,7 +163,7 @@ def make_set_closure_cell():
if cell.cell_contents != 100:
raise AssertionError # pragma: no cover
except Exception:
except Exception: # noqa: BLE001
return just_warn
else:
return set_closure_cell

View file

@ -1,6 +1,5 @@
# SPDX-License-Identifier: MIT
__all__ = ["set_run_validators", "get_run_validators"]
_run_validators = True
@ -15,7 +14,8 @@ def set_run_validators(run):
instead.
"""
if not isinstance(run, bool):
raise TypeError("'run' must be bool.")
msg = "'run' must be bool."
raise TypeError(msg)
global _run_validators
_run_validators = run

View file

@ -72,19 +72,25 @@ def asdict(
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
rv[a.name] = cf(
[
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in v
]
)
items = [
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in v
]
try:
rv[a.name] = cf(items)
except TypeError:
if not issubclass(cf, tuple):
raise
# Workaround for TypeError: cf.__new__() missing 1 required
# positional argument (which appears for a namedtuple)
rv[a.name] = cf(*items)
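
The rebuilt branch collects `items` first and retries with `cf(*items)` because a namedtuple's constructor takes its fields positionally rather than as a single iterable. A minimal sketch of the case this fixes (previously a TypeError when collection types were retained):

# Minimal sketch: a namedtuple-typed field survives asdict() when collection
# types are retained, thanks to the cf(*items) fallback above.
from collections import namedtuple
import attr

Point = namedtuple("Point", "x y")

@attr.s
class Shape:
    origin = attr.ib()

s = Shape(origin=Point(1, 2))
print(attr.asdict(s, retain_collection_types=True))
# {'origin': Point(x=1, y=2)}
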
elif isinstance(v, dict):
df = dict_factory
rv[a.name] = df(
@ -241,22 +247,26 @@ def astuple(
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
rv.append(
cf(
[
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(j.__class__)
else j
for j in v
]
items = [
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
)
if has(j.__class__)
else j
for j in v
]
try:
rv.append(cf(items))
except TypeError:
if not issubclass(cf, tuple):
raise
# Workaround for TypeError: cf.__new__() missing 1 required
# positional argument (which appears for a namedtuple)
rv.append(cf(*items))
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
@ -344,9 +354,8 @@ def assoc(inst, **changes):
for k, v in changes.items():
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
raise AttrsAttributeNotFoundError(
f"{k} is not an attrs attribute on {new.__class__}."
)
msg = f"{k} is not an attrs attribute on {new.__class__}."
raise AttrsAttributeNotFoundError(msg)
_obj_setattr(new, k, v)
return new
@ -379,17 +388,14 @@ def evolve(*args, **changes):
try:
(inst,) = args
except ValueError:
raise TypeError(
f"evolve() takes 1 positional argument, but {len(args)} "
"were given"
) from None
msg = f"evolve() takes 1 positional argument, but {len(args)} were given"
raise TypeError(msg) from None
else:
try:
inst = changes.pop("inst")
except KeyError:
raise TypeError(
"evolve() missing 1 required positional argument: 'inst'"
) from None
msg = "evolve() missing 1 required positional argument: 'inst'"
raise TypeError(msg) from None
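
The `assoc` and `evolve` hunks only shorten how the error messages are built. For reference, a minimal sketch of the `evolve()` call these messages describe:

# Minimal sketch: evolve() returns a changed copy; calling it without the
# instance (or with extra positional args) triggers the TypeErrors built above.
import attr

@attr.s
class Point:
    x = attr.ib()
    y = attr.ib()

p = Point(1, 2)
print(attr.evolve(p, y=5))  # Point(x=1, y=5)
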
import warnings

View file

@ -1,7 +1,9 @@
# SPDX-License-Identifier: MIT
import contextlib
import copy
import enum
import inspect
import linecache
import sys
import types
@ -87,7 +89,7 @@ class _CacheHashWrapper(int):
See GH #613 for more details.
"""
def __reduce__(self, _none_constructor=type(None), _args=()):
def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
return _none_constructor, _args
@ -248,18 +250,18 @@ def attrib(
)
if hash is not None and hash is not True and hash is not False:
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
msg = "Invalid value for hash. Must be True, False, or None."
raise TypeError(msg)
if factory is not None:
if default is not NOTHING:
raise ValueError(
"The `default` and `factory` arguments are mutually "
"exclusive."
msg = (
"The `default` and `factory` arguments are mutually exclusive."
)
raise ValueError(msg)
if not callable(factory):
raise ValueError("The `factory` argument must be a callable.")
msg = "The `factory` argument must be a callable."
raise ValueError(msg)
default = Factory(factory)
if metadata is None:
@ -323,9 +325,9 @@ def _make_method(name, script, filename, globs):
old_val = linecache.cache.setdefault(filename, linecache_tuple)
if old_val == linecache_tuple:
break
else:
filename = f"{base_filename[:-1]}-{count}>"
count += 1
filename = f"{base_filename[:-1]}-{count}>"
count += 1
_compile_and_eval(script, globs, locs, filename)
@ -430,7 +432,7 @@ def _collect_base_attrs(cls, taken_attr_names):
if a.inherited or a.name in taken_attr_names:
continue
a = a.evolve(inherited=True)
a = a.evolve(inherited=True) # noqa: PLW2901
base_attrs.append(a)
base_attr_map[a.name] = base_cls
@ -468,7 +470,7 @@ def _collect_base_attrs_broken(cls, taken_attr_names):
if a.name in taken_attr_names:
continue
a = a.evolve(inherited=True)
a = a.evolve(inherited=True) # noqa: PLW2901
taken_attr_names.add(a.name)
base_attrs.append(a)
base_attr_map[a.name] = base_cls
@ -493,7 +495,7 @@ def _transform_attrs(
anns = _get_annotations(cls)
if these is not None:
ca_list = [(name, ca) for name, ca in these.items()]
ca_list = list(these.items())
elif auto_attribs is True:
ca_names = {
name
@ -509,10 +511,7 @@ def _transform_attrs(
a = cd.get(attr_name, NOTHING)
if not isinstance(a, _CountingAttr):
if a is NOTHING:
a = attrib()
else:
a = attrib(default=a)
a = attrib() if a is NOTHING else attrib(default=a)
ca_list.append((attr_name, a))
unannotated = ca_names - annot_names
@ -563,10 +562,8 @@ def _transform_attrs(
had_default = False
for a in (a for a in attrs if a.init is not False and a.kw_only is False):
if had_default is True and a.default is NOTHING:
raise ValueError(
"No mandatory attributes allowed after an attribute with a "
f"default value or factory. Attribute in question: {a!r}"
)
msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
raise ValueError(msg)
if had_default is False and a.default is not NOTHING:
had_default = True
@ -628,6 +625,7 @@ class _ClassBuilder:
"_delete_attribs",
"_frozen",
"_has_pre_init",
"_pre_init_has_args",
"_has_post_init",
"_is_exc",
"_on_setattr",
@ -674,6 +672,13 @@ class _ClassBuilder:
self._weakref_slot = weakref_slot
self._cache_hash = cache_hash
self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
self._pre_init_has_args = False
if self._has_pre_init:
# Check if the pre init method has more arguments than just `self`
# We want to pass arguments if pre init expects arguments
pre_init_func = cls.__attrs_pre_init__
pre_init_signature = inspect.signature(pre_init_func)
self._pre_init_has_args = len(pre_init_signature.parameters) > 1
self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
self._delete_attribs = not bool(these)
self._is_exc = is_exc
@ -768,13 +773,11 @@ class _ClassBuilder:
name not in base_names
and getattr(cls, name, _sentinel) is not _sentinel
):
try:
# An AttributeError can happen if a base class defines a
# class variable and we want to set an attribute with the
# same name by using only a type annotation.
with contextlib.suppress(AttributeError):
delattr(cls, name)
except AttributeError:
# This can happen if a base class defines a class
# variable and we want to set an attribute with the
# same name by using only a type annotation.
pass
# Attach our dunder methods.
for name, value in self._cls_dict.items():
@ -799,7 +802,7 @@ class _ClassBuilder:
cd = {
k: v
for k, v in self._cls_dict.items()
if k not in tuple(self._attr_names) + ("__dict__", "__weakref__")
if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
}
# If our class doesn't have its own implementation of __setattr__
@ -821,7 +824,7 @@ class _ClassBuilder:
# Traverse the MRO to collect existing slots
# and check for an existing __weakref__.
existing_slots = dict()
existing_slots = {}
weakref_inherited = False
for base_cls in self._cls.__mro__[1:-1]:
if base_cls.__dict__.get("__weakref__", None) is not None:
@ -890,7 +893,8 @@ class _ClassBuilder:
for cell in closure_cells:
try:
match = cell.cell_contents is self._cls
except ValueError: # ValueError: Cell is empty
except ValueError: # noqa: PERF203
# ValueError: Cell is empty
pass
else:
if match:
@ -907,9 +911,8 @@ class _ClassBuilder:
def add_str(self):
repr = self._cls_dict.get("__repr__")
if repr is None:
raise ValueError(
"__str__ can only be generated if a __repr__ exists."
)
msg = "__str__ can only be generated if a __repr__ exists."
raise ValueError(msg)
def __str__(self):
return self.__repr__()
@ -980,6 +983,7 @@ class _ClassBuilder:
self._cls,
self._attrs,
self._has_pre_init,
self._pre_init_has_args,
self._has_post_init,
self._frozen,
self._slots,
@ -1006,6 +1010,7 @@ class _ClassBuilder:
self._cls,
self._attrs,
self._has_pre_init,
self._pre_init_has_args,
self._has_post_init,
self._frozen,
self._slots,
@ -1054,9 +1059,8 @@ class _ClassBuilder:
if self._has_custom_setattr:
# We need to write a __setattr__ but there already is one!
raise ValueError(
"Can't combine custom __setattr__ with on_setattr hooks."
)
msg = "Can't combine custom __setattr__ with on_setattr hooks."
raise ValueError(msg)
# docstring comes from _add_method_dunders
def __setattr__(self, name, val):
@ -1079,25 +1083,17 @@ class _ClassBuilder:
"""
Add __module__ and __qualname__ to a *method* if possible.
"""
try:
with contextlib.suppress(AttributeError):
method.__module__ = self._cls.__module__
except AttributeError:
pass
try:
method.__qualname__ = ".".join(
(self._cls.__qualname__, method.__name__)
)
except AttributeError:
pass
with contextlib.suppress(AttributeError):
method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
try:
with contextlib.suppress(AttributeError):
method.__doc__ = (
"Method generated by attrs for class "
f"{self._cls.__qualname__}."
)
except AttributeError:
pass
return method
@ -1108,7 +1104,8 @@ def _determine_attrs_eq_order(cmp, eq, order, default_eq):
values of eq and order. If *eq* is None, set it to *default_eq*.
"""
if cmp is not None and any((eq is not None, order is not None)):
raise ValueError("Don't mix `cmp` with `eq' and `order`.")
msg = "Don't mix `cmp` with `eq' and `order`."
raise ValueError(msg)
# cmp takes precedence due to bw-compatibility.
if cmp is not None:
@ -1123,7 +1120,8 @@ def _determine_attrs_eq_order(cmp, eq, order, default_eq):
order = eq
if eq is False and order is True:
raise ValueError("`order` can only be True if `eq` is True too.")
msg = "`order` can only be True if `eq` is True too."
raise ValueError(msg)
return eq, order
@ -1134,7 +1132,8 @@ def _determine_attrib_eq_order(cmp, eq, order, default_eq):
values of eq and order. If *eq* is None, set it to *default_eq*.
"""
if cmp is not None and any((eq is not None, order is not None)):
raise ValueError("Don't mix `cmp` with `eq' and `order`.")
msg = "Don't mix `cmp` with `eq' and `order`."
raise ValueError(msg)
def decide_callable_or_boolean(value):
"""
@ -1164,7 +1163,8 @@ def _determine_attrib_eq_order(cmp, eq, order, default_eq):
order, order_key = decide_callable_or_boolean(order)
if eq is False and order is True:
raise ValueError("`order` can only be True if `eq` is True too.")
msg = "`order` can only be True if `eq` is True too."
raise ValueError(msg)
return eq, eq_key, order, order_key
@ -1494,7 +1494,8 @@ def attrs(
)
if has_own_setattr and is_frozen:
raise ValueError("Can't freeze a class with a custom __setattr__.")
msg = "Can't freeze a class with a custom __setattr__."
raise ValueError(msg)
builder = _ClassBuilder(
cls,
@ -1547,18 +1548,15 @@ def attrs(
if hash is not True and hash is not False and hash is not None:
# Can't use `hash in` because 1 == True for example.
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
elif hash is False or (hash is None and eq is False) or is_exc:
msg = "Invalid value for hash. Must be True, False, or None."
raise TypeError(msg)
if hash is False or (hash is None and eq is False) or is_exc:
# Don't do anything. Should fall back to __object__'s __hash__
# which is by id.
if cache_hash:
raise TypeError(
"Invalid value for cache_hash. To use hash caching,"
" hashing must be either explicitly or implicitly "
"enabled."
)
msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
raise TypeError(msg)
elif hash is True or (
hash is None and eq is True and is_frozen is True
):
@ -1567,11 +1565,8 @@ def attrs(
else:
# Raise TypeError on attempts to hash.
if cache_hash:
raise TypeError(
"Invalid value for cache_hash. To use hash caching,"
" hashing must be either explicitly or implicitly "
"enabled."
)
msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
raise TypeError(msg)
builder.make_unhashable()
if _determine_whether_to_implement(
@ -1581,10 +1576,8 @@ def attrs(
else:
builder.add_attrs_init()
if cache_hash:
raise TypeError(
"Invalid value for cache_hash. To use hash caching,"
" init must be True."
)
msg = "Invalid value for cache_hash. To use hash caching, init must be True."
raise TypeError(msg)
if (
PY310
@ -1599,8 +1592,8 @@ def attrs(
# if it's used as `@attrs` but ``None`` if used as `@attrs()`.
if maybe_cls is None:
return wrap
else:
return wrap(maybe_cls)
return wrap(maybe_cls)
_attrs = attrs
@ -1648,10 +1641,7 @@ def _make_hash(cls, attrs, frozen, cache_hash):
else:
hash_def += ", *"
hash_def += (
", _cache_wrapper="
+ "__import__('attr._make')._make._CacheHashWrapper):"
)
hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
hash_func = "_cache_wrapper(" + hash_func
closing_braces += ")"
@ -1760,7 +1750,7 @@ def _make_eq(cls, attrs):
lines.append(f" self.{a.name},")
others.append(f" other.{a.name},")
lines += others + [" )"]
lines += [*others, " )"]
else:
lines.append(" return True")
@ -1928,7 +1918,8 @@ def fields(cls):
generic_base = get_generic_base(cls)
if generic_base is None and not isinstance(cls, type):
raise TypeError("Passed object must be a class.")
msg = "Passed object must be a class."
raise TypeError(msg)
attrs = getattr(cls, "__attrs_attrs__", None)
@ -1941,7 +1932,8 @@ def fields(cls):
# efficient.
cls.__attrs_attrs__ = attrs
return attrs
raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.")
msg = f"{cls!r} is not an attrs-decorated class."
raise NotAnAttrsClassError(msg)
return attrs
@ -1962,10 +1954,12 @@ def fields_dict(cls):
.. versionadded:: 18.1.0
"""
if not isinstance(cls, type):
raise TypeError("Passed object must be a class.")
msg = "Passed object must be a class."
raise TypeError(msg)
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is None:
raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.")
msg = f"{cls!r} is not an attrs-decorated class."
raise NotAnAttrsClassError(msg)
return {a.name: a for a in attrs}
@ -2001,6 +1995,7 @@ def _make_init(
cls,
attrs,
pre_init,
pre_init_has_args,
post_init,
frozen,
slots,
@ -2015,7 +2010,8 @@ def _make_init(
)
if frozen and has_cls_on_setattr:
raise ValueError("Frozen classes can't use on_setattr.")
msg = "Frozen classes can't use on_setattr."
raise ValueError(msg)
needs_cached_setattr = cache_hash or frozen
filtered_attrs = []
@ -2029,7 +2025,8 @@ def _make_init(
if a.on_setattr is not None:
if frozen is True:
raise ValueError("Frozen classes can't use on_setattr.")
msg = "Frozen classes can't use on_setattr."
raise ValueError(msg)
needs_cached_setattr = True
elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
@ -2042,6 +2039,7 @@ def _make_init(
frozen,
slots,
pre_init,
pre_init_has_args,
post_init,
cache_hash,
base_attr_map,
@ -2122,6 +2120,7 @@ def _attrs_to_init_script(
frozen,
slots,
pre_init,
pre_init_has_args,
post_init,
cache_hash,
base_attr_map,
@ -2208,10 +2207,7 @@ def _attrs_to_init_script(
arg_name = a.alias
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = "self"
else:
maybe_self = ""
maybe_self = "self" if has_factory and a.default.takes_self else ""
if a.init is False:
if has_factory:
@ -2235,25 +2231,24 @@ def _attrs_to_init_script(
)
)
names_for_globals[init_factory_name] = a.default.factory
elif a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
f"attr_dict['{attr_name}'].default",
has_on_setattr,
)
)
conv_name = _init_converter_pat % (a.name,)
names_for_globals[conv_name] = a.converter
else:
if a.converter is not None:
lines.append(
fmt_setter_with_converter(
attr_name,
f"attr_dict['{attr_name}'].default",
has_on_setattr,
)
)
conv_name = _init_converter_pat % (a.name,)
names_for_globals[conv_name] = a.converter
else:
lines.append(
fmt_setter(
attr_name,
f"attr_dict['{attr_name}'].default",
has_on_setattr,
)
lines.append(
fmt_setter(
attr_name,
f"attr_dict['{attr_name}'].default",
has_on_setattr,
)
)
elif a.default is not NOTHING and not has_factory:
arg = f"{arg_name}=attr_dict['{attr_name}'].default"
if a.kw_only:
@ -2362,7 +2357,7 @@ def _attrs_to_init_script(
# hash code would result in silent bugs.
if cache_hash:
if frozen:
if slots:
if slots: # noqa: SIM108
# if frozen and slots, then _setattr defined above
init_hash_cache = "_setattr('%s', %s)"
else:
@ -2380,11 +2375,23 @@ def _attrs_to_init_script(
lines.append(f"BaseException.__init__(self, {vals})")
args = ", ".join(args)
pre_init_args = args
if kw_only_args:
args += "%s*, %s" % (
", " if args else "", # leading comma
", ".join(kw_only_args), # kw_only args
)
pre_init_kw_only_args = ", ".join(
["%s=%s" % (kw_arg, kw_arg) for kw_arg in kw_only_args]
)
pre_init_args += (
", " if pre_init_args else ""
) # handle only kwargs and no regular args
pre_init_args += pre_init_kw_only_args
if pre_init and pre_init_has_args:
# If pre init method has arguments, pass same arguments as `__init__`
lines[0] = "self.__attrs_pre_init__(%s)" % pre_init_args
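
Together with the `inspect.signature` check added in `_ClassBuilder.__init__`, this makes the generated `__init__` forward its own arguments to `__attrs_pre_init__` whenever that hook declares parameters beyond `self`. A minimal sketch of the new behaviour:

# Minimal sketch: a pre-init hook that accepts the same arguments as __init__
# now receives them (previously it was only called with no arguments).
import attr

@attr.s
class Rectangle:
    width = attr.ib()
    height = attr.ib()

    def __attrs_pre_init__(self, width, height):
        print(f"about to build a {width}x{height} rectangle")

Rectangle(3, 4)  # prints before width/height are assigned
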
return (
"def %s(self, %s):\n %s\n"
@ -2537,9 +2544,8 @@ class Attribute:
if type is None:
type = ca.type
elif ca.type is not None:
raise ValueError(
"Type annotation and type argument cannot both be present"
)
msg = "Type annotation and type argument cannot both be present"
raise ValueError(msg)
inst_dict = {
k: getattr(ca, k)
for k in Attribute.__slots__
@ -2663,36 +2669,37 @@ class _CountingAttr:
"on_setattr",
"alias",
)
__attrs_attrs__ = tuple(
Attribute(
name=name,
alias=_default_init_alias_for(name),
default=NOTHING,
validator=None,
repr=True,
cmp=None,
hash=True,
init=True,
kw_only=False,
eq=True,
eq_key=None,
order=False,
order_key=None,
inherited=False,
on_setattr=None,
)
for name in (
"counter",
"_default",
"repr",
"eq",
"order",
"hash",
"init",
"on_setattr",
"alias",
)
) + (
__attrs_attrs__ = (
*tuple(
Attribute(
name=name,
alias=_default_init_alias_for(name),
default=NOTHING,
validator=None,
repr=True,
cmp=None,
hash=True,
init=True,
kw_only=False,
eq=True,
eq_key=None,
order=False,
order_key=None,
inherited=False,
on_setattr=None,
)
for name in (
"counter",
"_default",
"repr",
"eq",
"order",
"hash",
"init",
"on_setattr",
"alias",
)
),
Attribute(
name="metadata",
alias="metadata",
@ -2868,7 +2875,8 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments):
elif isinstance(attrs, (list, tuple)):
cls_dict = {a: attrib() for a in attrs}
else:
raise TypeError("attrs argument must be a dict or a list.")
msg = "attrs argument must be a dict or a list."
raise TypeError(msg)
pre_init = cls_dict.pop("__attrs_pre_init__", None)
post_init = cls_dict.pop("__attrs_post_init__", None)
@ -2888,12 +2896,10 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments):
# frame where the class is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
with contextlib.suppress(AttributeError, ValueError):
type_.__module__ = sys._getframe(1).f_globals.get(
"__name__", "__main__"
)
except (AttributeError, ValueError):
pass
# We do it here for proper warnings with meaningful stacklevel.
cmp = attributes_arguments.pop("cmp", None)

View file

@ -59,7 +59,7 @@ def define(
.. caution::
Usually this has only upsides and few visible effects in everyday
programming. But it *can* lead to some suprising behaviors, so please
programming. But it *can* lead to some surprising behaviors, so please
make sure to read :term:`slotted classes`.
- *auto_exc=True*
- *auto_detect=True*
@ -131,10 +131,8 @@ def define(
for base_cls in cls.__bases__:
if base_cls.__setattr__ is _frozen_setattrs:
if had_on_setattr:
raise ValueError(
"Frozen classes can't use on_setattr "
"(frozen-ness was inherited)."
)
msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
raise ValueError(msg)
on_setattr = setters.NO_OP
break
@ -151,8 +149,8 @@ def define(
# if it's used as `@attrs` but ``None`` if used as `@attrs()`.
if maybe_cls is None:
return wrap
else:
return wrap(maybe_cls)
return wrap(maybe_cls)
mutable = define
@ -180,10 +178,9 @@ def field(
Identical to `attr.ib`, except keyword-only and with some arguments
removed.
.. versionadded:: 22.3.0
The *type* parameter has been re-added; mostly for
{func}`attrs.make_class`. Please note that type checkers ignore this
metadata.
.. versionadded:: 23.1.0
The *type* parameter has been re-added; mostly for `attrs.make_class`.
Please note that type checkers ignore this metadata.
.. versionadded:: 20.1.0
"""
return attrib(
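
The docstring change moves the note about the re-added *type* parameter to 23.1.0 and simplifies the cross-reference. A minimal sketch of the `attrs.make_class` use case it mentions (type checkers ignore this metadata):

# Minimal sketch: field(type=...) only feeds Attribute.type when building
# classes programmatically; static type checkers do not read it.
import attr

Point = attr.make_class(
    "Point",
    {"x": attr.field(type=int), "y": attr.field(type=int)},
)
print(attr.fields(Point).x.type)  # <class 'int'>
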

View file

@ -70,21 +70,20 @@ def default_if_none(default=NOTHING, factory=None):
.. versionadded:: 18.2.0
"""
if default is NOTHING and factory is None:
raise TypeError("Must pass either `default` or `factory`.")
msg = "Must pass either `default` or `factory`."
raise TypeError(msg)
if default is not NOTHING and factory is not None:
raise TypeError(
"Must pass either `default` or `factory` but not both."
)
msg = "Must pass either `default` or `factory` but not both."
raise TypeError(msg)
if factory is not None:
default = Factory(factory)
if isinstance(default, Factory):
if default.takes_self:
raise ValueError(
"`takes_self` is not supported by default_if_none."
)
msg = "`takes_self` is not supported by default_if_none."
raise ValueError(msg)
def default_if_none_converter(val):
if val is not None:
@ -141,4 +140,5 @@ def to_bool(val):
except TypeError:
# Raised when "val" is not hashable (e.g., lists)
pass
raise ValueError(f"Cannot convert value to bool: {val}")
msg = f"Cannot convert value to bool: {val}"
raise ValueError(msg)
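
Both converter changes only pre-build the messages. A minimal usage sketch of `default_if_none` and `to_bool` applied to fields via `converter=`:

# Minimal sketch: default_if_none replaces None (here via a factory) and
# to_bool normalises common truthy/falsy spellings.
import attr
from attr.converters import default_if_none, to_bool

@attr.s
class Settings:
    tags = attr.ib(converter=default_if_none(factory=list), default=None)
    debug = attr.ib(converter=to_bool, default="no")

s = Settings()
print(s.tags, s.debug)  # [] False
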

View file

@ -1,5 +1,9 @@
# SPDX-License-Identifier: MIT
from __future__ import annotations
from typing import ClassVar
class FrozenError(AttributeError):
"""
@ -13,7 +17,7 @@ class FrozenError(AttributeError):
"""
msg = "can't set attribute"
args = [msg]
args: ClassVar[tuple[str]] = [msg]
class FrozenInstanceError(FrozenError):

View file

@ -13,6 +13,7 @@ def _split_what(what):
"""
return (
frozenset(cls for cls in what if isinstance(cls, type)),
frozenset(cls for cls in what if isinstance(cls, str)),
frozenset(cls for cls in what if isinstance(cls, Attribute)),
)
@ -22,14 +23,21 @@ def include(*what):
Include *what*.
:param what: What to include.
:type what: `list` of `type` or `attrs.Attribute`\\ s
:type what: `list` of classes `type`, field names `str` or
`attrs.Attribute`\\ s
:rtype: `callable`
.. versionchanged:: 23.1.0 Accept strings with field names.
"""
cls, attrs = _split_what(what)
cls, names, attrs = _split_what(what)
def include_(attribute, value):
return value.__class__ in cls or attribute in attrs
return (
value.__class__ in cls
or attribute.name in names
or attribute in attrs
)
return include_
@ -39,13 +47,20 @@ def exclude(*what):
Exclude *what*.
:param what: What to exclude.
:type what: `list` of classes or `attrs.Attribute`\\ s.
:type what: `list` of classes `type`, field names `str` or
`attrs.Attribute`\\ s.
:rtype: `callable`
.. versionchanged:: 23.3.0 Accept field name string as input argument
"""
cls, attrs = _split_what(what)
cls, names, attrs = _split_what(what)
def exclude_(attribute, value):
return value.__class__ not in cls and attribute not in attrs
return not (
value.__class__ in cls
or attribute.name in names
or attribute in attrs
)
return exclude_
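
This is the user-visible part of the filters change: `include()` and `exclude()` now also accept field names as plain strings, alongside types and `Attribute` objects. A minimal sketch:

# Minimal sketch: filtering a field out of asdict() by its name string,
# which this update newly allows.
import attr
from attr.filters import exclude

@attr.s
class Account:
    user = attr.ib()
    password = attr.ib()

print(attr.asdict(Account("jack", "s3cr3t"), filter=exclude("password")))
# {'user': 'jack'}
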

View file

@ -2,5 +2,5 @@ from typing import Any, Union
from . import Attribute, _FilterType
def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def include(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, str, Attribute[Any]]) -> _FilterType[Any]: ...

View file

@ -97,23 +97,21 @@ class _InstanceOfValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if not isinstance(value, self.type):
msg = "'{name}' must be {type!r} (got {value!r} that is a {actual!r}).".format(
name=attr.name,
type=self.type,
actual=value.__class__,
value=value,
)
raise TypeError(
"'{name}' must be {type!r} (got {value!r} that is a "
"{actual!r}).".format(
name=attr.name,
type=self.type,
actual=value.__class__,
value=value,
),
msg,
attr,
self.type,
value,
)
def __repr__(self):
return "<instance_of validator for type {type!r}>".format(
type=self.type
)
return f"<instance_of validator for type {self.type!r}>"
def instance_of(type):
@ -142,20 +140,18 @@ class _MatchesReValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if not self.match_func(value):
msg = "'{name}' must match regex {pattern!r} ({value!r} doesn't)".format(
name=attr.name, pattern=self.pattern.pattern, value=value
)
raise ValueError(
"'{name}' must match regex {pattern!r}"
" ({value!r} doesn't)".format(
name=attr.name, pattern=self.pattern.pattern, value=value
),
msg,
attr,
self.pattern,
value,
)
def __repr__(self):
return "<matches_re validator for pattern {pattern!r}>".format(
pattern=self.pattern
)
return f"<matches_re validator for pattern {self.pattern!r}>"
def matches_re(regex, flags=0, func=None):
@ -176,22 +172,17 @@ def matches_re(regex, flags=0, func=None):
"""
valid_funcs = (re.fullmatch, None, re.search, re.match)
if func not in valid_funcs:
raise ValueError(
"'func' must be one of {}.".format(
", ".join(
sorted(
e and e.__name__ or "None" for e in set(valid_funcs)
)
)
msg = "'func' must be one of {}.".format(
", ".join(
sorted(e and e.__name__ or "None" for e in set(valid_funcs))
)
)
raise ValueError(msg)
if isinstance(regex, Pattern):
if flags:
raise TypeError(
"'flags' can only be used with a string pattern; "
"pass flags to re.compile() instead"
)
msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
raise TypeError(msg)
pattern = regex
else:
pattern = re.compile(regex, flags)
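
The flattened messages keep the original rule: `flags` may only accompany a string pattern, while a pre-compiled `Pattern` must already carry its own flags. A minimal sketch of both spellings:

# Minimal sketch: flags with a string pattern, or a pre-compiled pattern that
# already embeds them; mixing the two raises the TypeError above.
import re
import attr
from attr.validators import matches_re

@attr.s
class User:
    name = attr.ib(validator=matches_re(r"[a-z]+", flags=re.IGNORECASE))
    slug = attr.ib(validator=matches_re(re.compile(r"[a-z0-9-]+")))

User("Jack", "my-slug")  # both validators pass
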
@ -215,20 +206,18 @@ class _ProvidesValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if not self.interface.providedBy(value):
msg = "'{name}' must provide {interface!r} which {value!r} doesn't.".format(
name=attr.name, interface=self.interface, value=value
)
raise TypeError(
"'{name}' must provide {interface!r} which {value!r} "
"doesn't.".format(
name=attr.name, interface=self.interface, value=value
),
msg,
attr,
self.interface,
value,
)
def __repr__(self):
return "<provides validator for interface {interface!r}>".format(
interface=self.interface
)
return f"<provides validator for interface {self.interface!r}>"
def provides(interface):
@ -269,9 +258,7 @@ class _OptionalValidator:
self.validator(inst, attr, value)
def __repr__(self):
return "<optional validator for {what} or None>".format(
what=repr(self.validator)
)
return f"<optional validator for {self.validator!r} or None>"
def optional(validator):
@ -304,19 +291,16 @@ class _InValidator:
in_options = False
if not in_options:
msg = f"'{attr.name}' must be in {self.options!r} (got {value!r})"
raise ValueError(
"'{name}' must be in {options!r} (got {value!r})".format(
name=attr.name, options=self.options, value=value
),
msg,
attr,
self.options,
value,
)
def __repr__(self):
return "<in_ validator with options {options!r}>".format(
options=self.options
)
return f"<in_ validator with options {self.options!r}>"
def in_(options):
@ -402,11 +386,8 @@ class _DeepIterable:
else f" {self.iterable_validator!r}"
)
return (
"<deep_iterable validator for{iterable_identifier}"
" iterables of {member!r}>"
).format(
iterable_identifier=iterable_identifier,
member=self.member_validator,
f"<deep_iterable validator for{iterable_identifier}"
f" iterables of {self.member_validator!r}>"
)
@ -477,19 +458,11 @@ class _NumberValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if not self.compare_func(value, self.bound):
raise ValueError(
"'{name}' must be {op} {bound}: {value}".format(
name=attr.name,
op=self.compare_op,
bound=self.bound,
value=value,
)
)
msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
raise ValueError(msg)
def __repr__(self):
return "<Validator for x {op} {bound}>".format(
op=self.compare_op, bound=self.bound
)
return f"<Validator for x {self.compare_op} {self.bound}>"
def lt(val):
@ -549,11 +522,8 @@ class _MaxLengthValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) > self.max_length:
raise ValueError(
"Length of '{name}' must be <= {max}: {len}".format(
name=attr.name, max=self.max_length, len=len(value)
)
)
msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
raise ValueError(msg)
def __repr__(self):
return f"<max_len validator for {self.max_length}>"
@ -580,11 +550,8 @@ class _MinLengthValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) < self.min_length:
raise ValueError(
"Length of '{name}' must be => {min}: {len}".format(
name=attr.name, min=self.min_length, len=len(value)
)
)
msg = f"Length of '{attr.name}' must be => {self.min_length}: {len(value)}"
raise ValueError(msg)
def __repr__(self):
return f"<min_len validator for {self.min_length}>"
@ -611,22 +578,16 @@ class _SubclassOfValidator:
We use a callable class to be able to change the ``__repr__``.
"""
if not issubclass(value, self.type):
msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
raise TypeError(
"'{name}' must be a subclass of {type!r} "
"(got {value!r}).".format(
name=attr.name,
type=self.type,
value=value,
),
msg,
attr,
self.type,
value,
)
def __repr__(self):
return "<subclass_of validator for type {type!r}>".format(
type=self.type
)
return f"<subclass_of validator for type {self.type!r}>"
def _subclass_of(type):
@ -680,7 +641,7 @@ class _NotValidator:
def __repr__(self):
return (
"<not_ validator wrapping {what!r}, " "capturing {exc_types!r}>"
"<not_ validator wrapping {what!r}, capturing {exc_types!r}>"
).format(
what=self.validator,
exc_types=self.exc_types,