Updated script that can be controlled by a Node.js web app

This commit is contained in:
mac OS
2024-11-25 12:24:18 +07:00
parent c440eda1f4
commit 8b0ab2bd3a
8662 changed files with 1803808 additions and 34 deletions

View File

@ -0,0 +1,103 @@
# SPDX-License-Identifier: MIT
"""
Classes Without Boilerplate
"""
from functools import partial
from typing import Callable
from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._compat import Protocol
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
NOTHING,
Attribute,
Converter,
Factory,
attrib,
attrs,
fields,
fields_dict,
make_class,
validate,
)
from ._next_gen import define, field, frozen, mutable
from ._version_info import VersionInfo
s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
class AttrsInstance(Protocol):
pass
__all__ = [
"Attribute",
"AttrsInstance",
"Converter",
"Factory",
"NOTHING",
"asdict",
"assoc",
"astuple",
"attr",
"attrib",
"attributes",
"attrs",
"cmp_using",
"converters",
"define",
"evolve",
"exceptions",
"field",
"fields",
"fields_dict",
"filters",
"frozen",
"get_run_validators",
"has",
"ib",
"make_class",
"mutable",
"resolve_types",
"s",
"set_run_validators",
"setters",
"validate",
"validators",
]
def _make_getattr(mod_name: str) -> Callable:
"""
Create a metadata proxy for packaging information that uses *mod_name* in
its warnings and errors.
"""
def __getattr__(name: str) -> str:
if name not in ("__version__", "__version_info__"):
msg = f"module {mod_name} has no attribute {name}"
raise AttributeError(msg)
try:
from importlib.metadata import metadata
except ImportError:
from importlib_metadata import metadata
meta = metadata("attrs")
if name == "__version_info__":
return VersionInfo._from_version_string(meta["version"])
return meta["version"]
return __getattr__
__getattr__ = _make_getattr(__name__)
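# Illustrative sketch, not part of the vendored file: the module-level
# __getattr__ defined above resolves version metadata lazily from the
# installed package, so importing attr stays cheap.
import attr

print(attr.__version__)       # for example "24.2.0" -- whatever is installed
print(attr.__version_info__)  # a VersionInfo instance parsed from that string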

View File

@ -0,0 +1,388 @@
import enum
import sys
from typing import (
Any,
Callable,
Generic,
Mapping,
Protocol,
Sequence,
TypeVar,
overload,
)
# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._typing_compat import AttrsInstance_
from ._version_info import VersionInfo
from attrs import (
define as define,
field as field,
mutable as mutable,
frozen as frozen,
_EqOrderType,
_ValidatorType,
_ConverterType,
_ReprArgType,
_OnSetAttrType,
_OnSetAttrArgType,
_FieldTransformer,
_ValidatorArgType,
)
if sys.version_info >= (3, 10):
from typing import TypeGuard
else:
from typing_extensions import TypeGuard
if sys.version_info >= (3, 11):
from typing import dataclass_transform
else:
from typing_extensions import dataclass_transform
__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str
_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)
_FilterType = Callable[["Attribute[_T]", _T], bool]
# We subclass this here to keep the protocol's qualified name clean.
class AttrsInstance(AttrsInstance_, Protocol):
pass
_A = TypeVar("_A", bound=type[AttrsInstance])
class _Nothing(enum.Enum):
NOTHING = enum.auto()
NOTHING = _Nothing.NOTHING
# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
if sys.version_info >= (3, 8):
from typing import Literal
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Callable[[Any], _T],
takes_self: Literal[True],
) -> _T: ...
@overload
def Factory(
factory: Callable[[], _T],
takes_self: Literal[False],
) -> _T: ...
else:
@overload
def Factory(factory: Callable[[], _T]) -> _T: ...
@overload
def Factory(
factory: Union[Callable[[Any], _T], Callable[[], _T]],
takes_self: bool = ...,
) -> _T: ...
In = TypeVar("In")
Out = TypeVar("Out")
class Converter(Generic[In, Out]):
@overload
def __init__(self, converter: Callable[[In], Out]) -> None: ...
@overload
def __init__(
self,
converter: Callable[[In, AttrsInstance, Attribute], Out],
*,
takes_self: Literal[True],
takes_field: Literal[True],
) -> None: ...
@overload
def __init__(
self,
converter: Callable[[In, Attribute], Out],
*,
takes_field: Literal[True],
) -> None: ...
@overload
def __init__(
self,
converter: Callable[[In, AttrsInstance], Out],
*,
takes_self: Literal[True],
) -> None: ...
class Attribute(Generic[_T]):
name: str
default: _T | None
validator: _ValidatorType[_T] | None
repr: _ReprArgType
cmp: _EqOrderType
eq: _EqOrderType
order: _EqOrderType
hash: bool | None
init: bool
converter: _ConverterType | Converter[Any, _T] | None
metadata: dict[Any, Any]
type: type[_T] | None
kw_only: bool
on_setattr: _OnSetAttrType
alias: str | None
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
# - Pros: Handles simple cases correctly
# - Cons: Might produce less informative errors in the case of conflicting
# TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
# - Pros: Better error messages than #1 for conflicting TypeVars
# - Cons: Terrible error messages for validator checks.
# e.g. attr.ib(type=int, validator=validate_str)
# -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
# - Pros: Simple here, and we could customize the plugin with our own errors.
# - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.
# `attr` lies about its return type to make the following possible:
# attr() -> Any
# attr(8) -> int
# attr(validator=<some callable>) -> Whatever the callable expects.
# This makes this type of assignments possible:
# x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
default: None = ...,
validator: None = ...,
repr: _ReprArgType = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
metadata: Mapping[Any, Any] | None = ...,
type: None = ...,
converter: None = ...,
factory: None = ...,
kw_only: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
alias: str | None = ...,
) -> Any: ...
# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
default: None = ...,
validator: _ValidatorArgType[_T] | None = ...,
repr: _ReprArgType = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
metadata: Mapping[Any, Any] | None = ...,
type: type[_T] | None = ...,
converter: _ConverterType | Converter[Any, _T] | None = ...,
factory: Callable[[], _T] | None = ...,
kw_only: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
alias: str | None = ...,
) -> _T: ...
# This form catches an explicit default argument.
@overload
def attrib(
default: _T,
validator: _ValidatorArgType[_T] | None = ...,
repr: _ReprArgType = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
metadata: Mapping[Any, Any] | None = ...,
type: type[_T] | None = ...,
converter: _ConverterType | Converter[Any, _T] | None = ...,
factory: Callable[[], _T] | None = ...,
kw_only: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
alias: str | None = ...,
) -> _T: ...
# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
default: _T | None = ...,
validator: _ValidatorArgType[_T] | None = ...,
repr: _ReprArgType = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
metadata: Mapping[Any, Any] | None = ...,
type: object = ...,
converter: _ConverterType | Converter[Any, _T] | None = ...,
factory: Callable[[], _T] | None = ...,
kw_only: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
alias: str | None = ...,
) -> Any: ...
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
maybe_cls: _C,
these: dict[str, Any] | None = ...,
repr_ns: str | None = ...,
repr: bool = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: bool | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
field_transformer: _FieldTransformer | None = ...,
match_args: bool = ...,
unsafe_hash: bool | None = ...,
) -> _C: ...
@overload
@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
def attrs(
maybe_cls: None = ...,
these: dict[str, Any] | None = ...,
repr_ns: str | None = ...,
repr: bool = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
auto_detect: bool = ...,
collect_by_mro: bool = ...,
getstate_setstate: bool | None = ...,
on_setattr: _OnSetAttrArgType | None = ...,
field_transformer: _FieldTransformer | None = ...,
match_args: bool = ...,
unsafe_hash: bool | None = ...,
) -> Callable[[_C], _C]: ...
def fields(cls: type[AttrsInstance]) -> Any: ...
def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
cls: _A,
globalns: dict[str, Any] | None = ...,
localns: dict[str, Any] | None = ...,
attribs: list[Attribute[Any]] | None = ...,
include_extras: bool = ...,
) -> _A: ...
# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
name: str,
attrs: list[str] | tuple[str, ...] | dict[str, Any],
bases: tuple[type, ...] = ...,
class_body: dict[str, Any] | None = ...,
repr_ns: str | None = ...,
repr: bool = ...,
cmp: _EqOrderType | None = ...,
hash: bool | None = ...,
init: bool = ...,
slots: bool = ...,
frozen: bool = ...,
weakref_slot: bool = ...,
str: bool = ...,
auto_attribs: bool = ...,
kw_only: bool = ...,
cache_hash: bool = ...,
auto_exc: bool = ...,
eq: _EqOrderType | None = ...,
order: _EqOrderType | None = ...,
collect_by_mro: bool = ...,
on_setattr: _OnSetAttrArgType | None = ...,
field_transformer: _FieldTransformer | None = ...,
) -> type: ...
# _funcs --
# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
inst: AttrsInstance,
recurse: bool = ...,
filter: _FilterType[Any] | None = ...,
dict_factory: type[Mapping[Any, Any]] = ...,
retain_collection_types: bool = ...,
value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
tuple_keys: bool | None = ...,
) -> dict[str, Any]: ...
# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
inst: AttrsInstance,
recurse: bool = ...,
filter: _FilterType[Any] | None = ...,
tuple_factory: type[Sequence[Any]] = ...,
retain_collection_types: bool = ...,
) -> tuple[Any, ...]: ...
def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...
# _config --
def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...
# aliases --
s = attributes = attrs
ib = attr = attrib
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)

View File

@ -0,0 +1,160 @@
# SPDX-License-Identifier: MIT
import functools
import types
from ._make import _make_ne
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
def cmp_using(
eq=None,
lt=None,
le=None,
gt=None,
ge=None,
require_same_type=True,
class_name="Comparable",
):
"""
Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
and ``cmp`` arguments to customize field comparison.
The resulting class will have a full set of ordering methods if at least
one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
Args:
eq (typing.Callable | None):
Callable used to evaluate equality of two objects.
lt (typing.Callable | None):
Callable used to evaluate whether one object is less than another
object.
le (typing.Callable | None):
Callable used to evaluate whether one object is less than or equal
to another object.
gt (typing.Callable | None):
Callable used to evaluate whether one object is greater than
another object.
ge (typing.Callable | None):
Callable used to evaluate whether one object is greater than or
equal to another object.
require_same_type (bool):
When `True`, equality and ordering methods will return
`NotImplemented` if objects are not of the same type.
class_name (str | None): Name of class. Defaults to "Comparable".
See `comparison` for more details.
.. versionadded:: 21.1.0
"""
body = {
"__slots__": ["value"],
"__init__": _make_init(),
"_requirements": [],
"_is_comparable_to": _is_comparable_to,
}
# Add operations.
num_order_functions = 0
has_eq_function = False
if eq is not None:
has_eq_function = True
body["__eq__"] = _make_operator("eq", eq)
body["__ne__"] = _make_ne()
if lt is not None:
num_order_functions += 1
body["__lt__"] = _make_operator("lt", lt)
if le is not None:
num_order_functions += 1
body["__le__"] = _make_operator("le", le)
if gt is not None:
num_order_functions += 1
body["__gt__"] = _make_operator("gt", gt)
if ge is not None:
num_order_functions += 1
body["__ge__"] = _make_operator("ge", ge)
type_ = types.new_class(
class_name, (object,), {}, lambda ns: ns.update(body)
)
# Add same type requirement.
if require_same_type:
type_._requirements.append(_check_same_type)
# Add total ordering if at least one operation was defined.
if 0 < num_order_functions < 4:
if not has_eq_function:
# functools.total_ordering requires __eq__ to be defined,
# so raise an error early here to keep a nice stack trace.
msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
raise ValueError(msg)
type_ = functools.total_ordering(type_)
return type_
def _make_init():
"""
Create __init__ method.
"""
def __init__(self, value):
"""
Initialize object with *value*.
"""
self.value = value
return __init__
def _make_operator(name, func):
"""
Create operator method.
"""
def method(self, other):
if not self._is_comparable_to(other):
return NotImplemented
result = func(self.value, other.value)
if result is NotImplemented:
return NotImplemented
return result
method.__name__ = f"__{name}__"
method.__doc__ = (
f"Return a {_operation_names[name]} b. Computed by attrs."
)
return method
def _is_comparable_to(self, other):
"""
Check whether `other` is comparable to `self`.
"""
return all(func(self, other) for func in self._requirements)
def _check_same_type(self, other):
"""
Return True if *self* and *other* are of the same type, False otherwise.
"""
return other.value.__class__ is self.value.__class__
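# Illustrative sketch, not part of the vendored file: cmp_using() builds a
# comparison wrapper that attrs.field() accepts for its `eq`/`order`
# arguments (the class and field names below are made up).
import attrs
from attr import cmp_using

@attrs.define
class Tag:
    # Compare the label case-insensitively; everything else stays default.
    label: str = attrs.field(eq=cmp_using(eq=lambda a, b: a.lower() == b.lower()))

assert Tag("Prod") == Tag("prod")
assert Tag("Prod") != Tag("dev")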

View File

@ -0,0 +1,13 @@
from typing import Any, Callable
_CompareWithType = Callable[[Any, Any], bool]
def cmp_using(
eq: _CompareWithType | None = ...,
lt: _CompareWithType | None = ...,
le: _CompareWithType | None = ...,
gt: _CompareWithType | None = ...,
ge: _CompareWithType | None = ...,
require_same_type: bool = ...,
class_name: str = ...,
) -> type: ...

View File

@ -0,0 +1,103 @@
# SPDX-License-Identifier: MIT
import inspect
import platform
import sys
import threading
from collections.abc import Mapping, Sequence # noqa: F401
from typing import _GenericAlias
PYPY = platform.python_implementation() == "PyPy"
PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
if sys.version_info < (3, 8):
try:
from typing_extensions import Protocol
except ImportError: # pragma: no cover
Protocol = object
else:
from typing import Protocol # noqa: F401
if PY_3_14_PLUS: # pragma: no cover
import annotationlib
_get_annotations = annotationlib.get_annotations
else:
def _get_annotations(cls):
"""
Get annotations for *cls*.
"""
return cls.__dict__.get("__annotations__", {})
class _AnnotationExtractor:
"""
Extract type annotations from a callable, returning None whenever there
is none.
"""
__slots__ = ["sig"]
def __init__(self, callable):
try:
self.sig = inspect.signature(callable)
except (ValueError, TypeError): # inspect failed
self.sig = None
def get_first_param_type(self):
"""
Return the type annotation of the first argument if it's not empty.
"""
if not self.sig:
return None
params = list(self.sig.parameters.values())
if params and params[0].annotation is not inspect.Parameter.empty:
return params[0].annotation
return None
def get_return_type(self):
"""
Return the return type if it's not empty.
"""
if (
self.sig
and self.sig.return_annotation is not inspect.Signature.empty
):
return self.sig.return_annotation
return None
# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
def get_generic_base(cl):
"""If this is a generic class (A[str]), return the generic base for it."""
if cl.__class__ is _GenericAlias:
return cl.__origin__
return None
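# Illustrative sketch, not part of the vendored file: _AnnotationExtractor is
# the private helper attrs uses to infer types from converters and validators
# without failing on callables that have no introspectable signature.
from attr._compat import _AnnotationExtractor

def to_int(value: str) -> int:
    return int(value)

xtr = _AnnotationExtractor(to_int)
assert xtr.get_first_param_type() is str
assert xtr.get_return_type() is int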

View File

@ -0,0 +1,31 @@
# SPDX-License-Identifier: MIT
__all__ = ["set_run_validators", "get_run_validators"]
_run_validators = True
def set_run_validators(run):
"""
Set whether or not validators are run. By default, they are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to the new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
instead.
"""
if not isinstance(run, bool):
msg = "'run' must be bool."
raise TypeError(msg)
global _run_validators
_run_validators = run
def get_run_validators():
"""
Return whether or not validators are run.
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
moved to the new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
instead.
"""
return _run_validators
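# Illustrative sketch, not part of the vendored file: the deprecated toggle
# above is what the newer attrs.validators.set_disabled()/get_disabled() wrap.
from attr import get_run_validators, set_run_validators

set_run_validators(False)      # validators are skipped from here on
assert get_run_validators() is False
set_run_validators(True)       # restore the default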

View File

@ -0,0 +1,522 @@
# SPDX-License-Identifier: MIT
import copy
from ._compat import PY_3_9_PLUS, get_generic_base
from ._make import _OBJ_SETATTR, NOTHING, fields
from .exceptions import AttrsAttributeNotFoundError
def asdict(
inst,
recurse=True,
filter=None,
dict_factory=dict,
retain_collection_types=False,
value_serializer=None,
):
"""
Return the *attrs* attribute values of *inst* as a dict.
Optionally recurse into other *attrs*-decorated classes.
Args:
inst: Instance of an *attrs*-decorated class.
recurse (bool): Recurse into classes that are also *attrs*-decorated.
filter (~typing.Callable):
A callable whose return code determines whether an attribute or
element is included (`True`) or dropped (`False`). Is called with
the `attrs.Attribute` as the first argument and the value as the
second argument.
dict_factory (~typing.Callable):
A callable to produce dictionaries from. For example, to produce
ordered dictionaries instead of normal Python dictionaries, pass in
``collections.OrderedDict``.
retain_collection_types (bool):
Do not convert to `list` when encountering an attribute whose type
is `tuple` or `set`. Only meaningful if *recurse* is `True`.
value_serializer (typing.Callable | None):
A hook that is called for every attribute or dict key/value. It
receives the current instance, field and value and must return the
(updated) value. The hook is run *after* the optional *filter* has
been applied.
Returns:
Return type of *dict_factory*.
Raises:
attrs.exceptions.NotAnAttrsClassError:
If *cls* is not an *attrs* class.
.. versionadded:: 16.0.0 *dict_factory*
.. versionadded:: 16.1.0 *retain_collection_types*
.. versionadded:: 20.3.0 *value_serializer*
.. versionadded:: 21.3.0
If a dict has a collection for a key, it is serialized as a tuple.
"""
attrs = fields(inst.__class__)
rv = dict_factory()
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if value_serializer is not None:
v = value_serializer(inst, a, v)
if recurse is True:
if has(v.__class__):
rv[a.name] = asdict(
v,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain_collection_types is True else list
items = [
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in v
]
try:
rv[a.name] = cf(items)
except TypeError:
if not issubclass(cf, tuple):
raise
# Workaround for TypeError: cf.__new__() missing 1 required
# positional argument (which appears, for example, for a namedtuple)
rv[a.name] = cf(*items)
elif isinstance(v, dict):
df = dict_factory
rv[a.name] = df(
(
_asdict_anything(
kk,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in v.items()
)
else:
rv[a.name] = v
else:
rv[a.name] = v
return rv
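# Illustrative sketch, not part of the vendored file: asdict() with a filter
# that drops private fields and a value_serializer that stringifies datetimes
# (the class and field names are made up).
import datetime
import attr

@attr.s(auto_attribs=True)
class Event:
    name: str
    when: datetime.datetime
    _secret: str = "hidden"

e = Event("deploy", datetime.datetime(2024, 11, 25))
d = attr.asdict(
    e,
    filter=lambda a, v: not a.name.startswith("_"),
    value_serializer=lambda inst, a, v: (
        v.isoformat() if isinstance(v, datetime.datetime) else v
    ),
)
assert d == {"name": "deploy", "when": "2024-11-25T00:00:00"}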
def _asdict_anything(
val,
is_key,
filter,
dict_factory,
retain_collection_types,
value_serializer,
):
"""
``asdict`` only works on attrs instances; this works on anything.
"""
if getattr(val.__class__, "__attrs_attrs__", None) is not None:
# Attrs class.
rv = asdict(
val,
recurse=True,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
elif isinstance(val, (tuple, list, set, frozenset)):
if retain_collection_types is True:
cf = val.__class__
elif is_key:
cf = tuple
else:
cf = list
rv = cf(
[
_asdict_anything(
i,
is_key=False,
filter=filter,
dict_factory=dict_factory,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
)
for i in val
]
)
elif isinstance(val, dict):
df = dict_factory
rv = df(
(
_asdict_anything(
kk,
is_key=True,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
_asdict_anything(
vv,
is_key=False,
filter=filter,
dict_factory=df,
retain_collection_types=retain_collection_types,
value_serializer=value_serializer,
),
)
for kk, vv in val.items()
)
else:
rv = val
if value_serializer is not None:
rv = value_serializer(None, None, rv)
return rv
def astuple(
inst,
recurse=True,
filter=None,
tuple_factory=tuple,
retain_collection_types=False,
):
"""
Return the *attrs* attribute values of *inst* as a tuple.
Optionally recurse into other *attrs*-decorated classes.
Args:
inst: Instance of an *attrs*-decorated class.
recurse (bool):
Recurse into classes that are also *attrs*-decorated.
filter (~typing.Callable):
A callable whose return code determines whether an attribute or
element is included (`True`) or dropped (`False`). Is called with
the `attrs.Attribute` as the first argument and the value as the
second argument.
tuple_factory (~typing.Callable):
A callable to produce tuples from. For example, to produce lists
instead of tuples.
retain_collection_types (bool):
Do not convert to `list` or `dict` when encountering an attribute
which type is `tuple`, `dict` or `set`. Only meaningful if
*recurse* is `True`.
Returns:
Return type of *tuple_factory*
Raises:
attrs.exceptions.NotAnAttrsClassError:
If *cls* is not an *attrs* class.
.. versionadded:: 16.2.0
"""
attrs = fields(inst.__class__)
rv = []
retain = retain_collection_types # Very long. :/
for a in attrs:
v = getattr(inst, a.name)
if filter is not None and not filter(a, v):
continue
if recurse is True:
if has(v.__class__):
rv.append(
astuple(
v,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
)
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
items = [
(
astuple(
j,
recurse=True,
filter=filter,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(j.__class__)
else j
)
for j in v
]
try:
rv.append(cf(items))
except TypeError:
if not issubclass(cf, tuple):
raise
# Workaround for TypeError: cf.__new__() missing 1 required
# positional argument (which appears, for example, for a namedtuple)
rv.append(cf(*items))
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
df(
(
(
astuple(
kk,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(kk.__class__)
else kk
),
(
astuple(
vv,
tuple_factory=tuple_factory,
retain_collection_types=retain,
)
if has(vv.__class__)
else vv
),
)
for kk, vv in v.items()
)
)
else:
rv.append(v)
else:
rv.append(v)
return rv if tuple_factory is list else tuple_factory(rv)
def has(cls):
"""
Check whether *cls* is a class with *attrs* attributes.
Args:
cls (type): Class to introspect.
Raises:
TypeError: If *cls* is not a class.
Returns:
bool:
"""
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is not None:
return True
# No attrs, maybe it's a specialized generic (A[str])?
generic_base = get_generic_base(cls)
if generic_base is not None:
generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
if generic_attrs is not None:
# Stick it on here for speed next time.
cls.__attrs_attrs__ = generic_attrs
return generic_attrs is not None
return False
def assoc(inst, **changes):
"""
Copy *inst* and apply *changes*.
This is different from `evolve`, which applies the changes to the arguments
that create the new instance.
`evolve`'s behavior is preferable, but there are `edge cases`_ where it
doesn't work. Therefore `assoc` is deprecated, but will not be removed.
.. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
Args:
inst: Instance of a class with *attrs* attributes.
changes: Keyword changes in the new copy.
Returns:
A copy of inst with *changes* incorporated.
Raises:
attrs.exceptions.AttrsAttributeNotFoundError:
If *attr_name* couldn't be found on *cls*.
attrs.exceptions.NotAnAttrsClassError:
If *cls* is not an *attrs* class.
.. deprecated:: 17.1.0
Use `attrs.evolve` instead if you can. This function will not be
removed due to the slightly different approach compared to
`attrs.evolve`, though.
"""
new = copy.copy(inst)
attrs = fields(inst.__class__)
for k, v in changes.items():
a = getattr(attrs, k, NOTHING)
if a is NOTHING:
msg = f"{k} is not an attrs attribute on {new.__class__}."
raise AttrsAttributeNotFoundError(msg)
_OBJ_SETATTR(new, k, v)
return new
def evolve(*args, **changes):
"""
Create a new instance, based on the first positional argument with
*changes* applied.
Args:
inst:
Instance of a class with *attrs* attributes. *inst* must be passed
as a positional argument.
changes:
Keyword changes in the new copy.
Returns:
A copy of inst with *changes* incorporated.
Raises:
TypeError:
If *attr_name* couldn't be found in the class ``__init__``.
attrs.exceptions.NotAnAttrsClassError:
If *cls* is not an *attrs* class.
.. versionadded:: 17.1.0
.. deprecated:: 23.1.0
It is now deprecated to pass the instance using the keyword argument
*inst*. It will raise a warning until at least April 2024, after which
it will become an error. Always pass the instance as a positional
argument.
.. versionchanged:: 24.1.0
*inst* can't be passed as a keyword argument anymore.
"""
try:
(inst,) = args
except ValueError:
msg = (
f"evolve() takes 1 positional argument, but {len(args)} were given"
)
raise TypeError(msg) from None
cls = inst.__class__
attrs = fields(cls)
for a in attrs:
if not a.init:
continue
attr_name = a.name # To deal with private attributes.
init_name = a.alias
if init_name not in changes:
changes[init_name] = getattr(inst, attr_name)
return cls(**changes)
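# Illustrative sketch, not part of the vendored file: evolve() rebuilds the
# instance through __init__, so private attributes are addressed by their
# init alias (leading underscore stripped); names below are made up.
import attr

@attr.s(auto_attribs=True, frozen=True)
class Config:
    host: str
    _token: str = ""

c = Config("localhost", token="abc")
assert attr.evolve(c, token="xyz") == Config("localhost", token="xyz")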
def resolve_types(
cls, globalns=None, localns=None, attribs=None, include_extras=True
):
"""
Resolve any strings and forward annotations in type annotations.
This is only required if you need concrete types in :class:`Attribute`'s
*type* field. In other words, you don't need to resolve your types if you
only use them for static type checking.
With no arguments, names will be looked up in the module in which the class
was created. If this is not what you want, for example, if the name only
exists inside a method, you may pass *globalns* or *localns* to specify
other dictionaries in which to look up these names. See the docs of
`typing.get_type_hints` for more details.
Args:
cls (type): Class to resolve.
globalns (dict | None): Dictionary containing global variables.
localns (dict | None): Dictionary containing local variables.
attribs (list | None):
List of attribs for the given class. This is necessary when calling
from inside a ``field_transformer`` since *cls* is not an *attrs*
class yet.
include_extras (bool):
Resolve more accurately, if possible. Pass ``include_extras`` to
``typing.get_type_hints``, if supported by the typing module. On
supported Python versions (3.9+), this resolves the types more
accurately.
Raises:
TypeError: If *cls* is not a class.
attrs.exceptions.NotAnAttrsClassError:
If *cls* is not an *attrs* class and you didn't pass any attribs.
NameError: If types cannot be resolved because of missing variables.
Returns:
*cls* so you can use this function also as a class decorator. Please
note that you have to apply it **after** `attrs.define`. That means the
decorator has to come in the line **before** `attrs.define`.
.. versionadded:: 20.1.0
.. versionadded:: 21.1.0 *attribs*
.. versionadded:: 23.1.0 *include_extras*
"""
# Since calling get_type_hints is expensive we cache whether we've
# done it already.
if getattr(cls, "__attrs_types_resolved__", None) != cls:
import typing
kwargs = {"globalns": globalns, "localns": localns}
if PY_3_9_PLUS:
kwargs["include_extras"] = include_extras
hints = typing.get_type_hints(cls, **kwargs)
for field in fields(cls) if attribs is None else attribs:
if field.name in hints:
# Since fields have been frozen we must work around it.
_OBJ_SETATTR(field, "type", hints[field.name])
# We store the class we resolved so that subclasses know they haven't
# been resolved.
cls.__attrs_types_resolved__ = cls
# Return the class so you can use it as a decorator too.
return cls
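# Illustrative sketch, not part of the vendored file: resolve_types() used as
# a decorator; per the docstring above it is applied *after* (above)
# attrs.define, and the forward-referenced name must already be importable.
from typing import Optional

import attr
import attrs

@attrs.define
class Leaf:
    value: int

@attr.resolve_types
@attrs.define
class Tree:
    root: "Optional[Leaf]" = None

assert attr.fields(Tree).root.type == Optional[Leaf]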

File diff suppressed because it is too large

View File

@ -0,0 +1,631 @@
# SPDX-License-Identifier: MIT
"""
These are keyword-only APIs that call `attr.s` and `attr.ib` with different
default values.
"""
from functools import partial
from . import setters
from ._funcs import asdict as _asdict
from ._funcs import astuple as _astuple
from ._make import (
_DEFAULT_ON_SETATTR,
NOTHING,
_frozen_setattrs,
attrib,
attrs,
)
from .exceptions import UnannotatedAttributeError
def define(
maybe_cls=None,
*,
these=None,
repr=None,
unsafe_hash=None,
hash=None,
init=None,
slots=True,
frozen=False,
weakref_slot=True,
str=False,
auto_attribs=None,
kw_only=False,
cache_hash=False,
auto_exc=True,
eq=None,
order=False,
auto_detect=True,
getstate_setstate=None,
on_setattr=None,
field_transformer=None,
match_args=True,
):
r"""
A class decorator that adds :term:`dunder methods` according to
:term:`fields <field>` specified using :doc:`type annotations <types>`,
`field()` calls, or the *these* argument.
Since *attrs* patches or replaces an existing class, you cannot use
`object.__init_subclass__` with *attrs* classes, because it runs too early.
As a replacement, you can define ``__attrs_init_subclass__`` on your class.
It will be called by *attrs* classes that subclass it after they're
created. See also :ref:`init-subclass`.
Args:
slots (bool):
Create a :term:`slotted class <slotted classes>` that's more
memory-efficient. Slotted classes are generally superior to the
default dict classes, but have some gotchas you should know about,
so we encourage you to read the :term:`glossary entry <slotted
classes>`.
auto_detect (bool):
Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
explicitly, assume they are set to True **unless any** of the
involved methods for one of the arguments is implemented in the
*current* class (meaning, it is *not* inherited from some base
class).
So, for example by implementing ``__eq__`` on a class yourself,
*attrs* will deduce ``eq=False`` and will create *neither*
``__eq__`` *nor* ``__ne__`` (but Python classes come with a
sensible ``__ne__`` by default, so it *should* be enough to only
implement ``__eq__`` in most cases).
Passing True or False to *init*, *repr*, *eq*, *cmp*, or *hash*
overrides whatever *auto_detect* would determine.
auto_exc (bool):
If the class subclasses `BaseException` (which implicitly includes
any subclass of any exception), the following happens to behave
like a well-behaved Python exception class:
- the values for *eq*, *order*, and *hash* are ignored and the
instances compare and hash by the instance's ids [#]_ ,
- all attributes that are either passed into ``__init__`` or have a
default value are additionally available as a tuple in the
``args`` attribute,
- the value of *str* is ignored leaving ``__str__`` to base
classes.
.. [#]
Note that *attrs* will *not* remove existing implementations of
``__hash__`` or the equality methods. It just won't add own
ones.
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
A callable that is run whenever the user attempts to set an
attribute (either by assignment like ``i.x = 42`` or by using
`setattr` like ``setattr(i, "x", 42)``). It receives the same
arguments as validators: the instance, the attribute that is being
modified, and the new value.
If no exception is raised, the attribute is set to the return value
of the callable.
If a list of callables is passed, they're automatically wrapped in
an `attrs.setters.pipe`.
If left None, the default behavior is to run converters and
validators whenever an attribute is set.
init (bool):
Create a ``__init__`` method that initializes the *attrs*
attributes. Leading underscores are stripped for the argument name,
unless an alias is set on the attribute.
.. seealso::
`init` shows advanced ways to customize the generated
``__init__`` method, including executing code before and after.
repr (bool):
Create a ``__repr__`` method with a human readable representation
of *attrs* attributes.
str (bool):
Create a ``__str__`` method that is identical to ``__repr__``. This
is usually not necessary except for `Exception`\ s.
eq (bool | None):
If True or None (default), add ``__eq__`` and ``__ne__`` methods
that check two instances for equality.
.. seealso::
`comparison` describes how to customize the comparison behavior
going as far as comparing NumPy arrays.
order (bool | None):
If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
methods that behave like *eq* above and allow instances to be
ordered.
They compare the instances as if they were tuples of their *attrs*
attributes if and only if the types of both classes are
*identical*.
If `None` mirror value of *eq*.
.. seealso:: `comparison`
cmp (bool | None):
Setting *cmp* is equivalent to setting *eq* and *order* to the same
value. Must not be mixed with *eq* or *order*.
unsafe_hash (bool | None):
If None (default), the ``__hash__`` method is generated according
how *eq* and *frozen* are set.
1. If *both* are True, *attrs* will generate a ``__hash__`` for
you.
2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
to None, marking it unhashable (which it is).
3. If *eq* is False, ``__hash__`` will be left untouched meaning
the ``__hash__`` method of the base class will be used. If the
base class is `object`, this means it will fall back to id-based
hashing.
Although not recommended, you can decide for yourself and force
*attrs* to create one (for example, if the class is immutable even
though you didn't freeze it programmatically) by passing True or
False explicitly. Both of these cases are rather special and should be used
carefully.
.. seealso::
- Our documentation on `hashing`,
- Python's documentation on `object.__hash__`,
- and the `GitHub issue that led to the default \ behavior
<https://github.com/python-attrs/attrs/issues/136>`_ for more
details.
hash (bool | None):
Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
cache_hash (bool):
Ensure that the object's hash code is computed only once and stored
on the object. If this is set to True, hashing must be either
explicitly or implicitly enabled for this class. If the hash code
is cached, avoid any reassignments of fields involved in hash code
computation or mutations of the objects those fields point to after
object creation. If such changes occur, the behavior of the
object's hash code is undefined.
frozen (bool):
Make instances immutable after initialization. If someone attempts
to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
is raised.
.. note::
1. This is achieved by installing a custom ``__setattr__``
method on your class, so you can't implement your own.
2. True immutability is impossible in Python.
3. This *does* have a minor runtime performance `impact
<how-frozen>` when initializing new instances. In other
words: ``__init__`` is slightly slower with ``frozen=True``.
4. If a class is frozen, you cannot modify ``self`` in
``__attrs_post_init__`` or a self-written ``__init__``. You
can circumvent that limitation by using
``object.__setattr__(self, "attribute_name", value)``.
5. Subclasses of a frozen class are frozen too.
kw_only (bool):
Make all attributes keyword-only in the generated ``__init__`` (if
*init* is False, this parameter is ignored).
weakref_slot (bool):
Make instances weak-referenceable. This has no effect unless
*slots* is True.
field_transformer (~typing.Callable | None):
A function that is called with the original class object and all
fields right before *attrs* finalizes the class. You can use this,
for example, to automatically add converters or validators to
fields based on their types.
.. seealso:: `transform-fields`
match_args (bool):
If True (default), set ``__match_args__`` on the class to support
:pep:`634` (*Structural Pattern Matching*). It is a tuple of all
non-keyword-only ``__init__`` parameter names on Python 3.10 and
later. Ignored on older Python versions.
collect_by_mro (bool):
If True, *attrs* collects attributes from base classes correctly
according to the `method resolution order
<https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
See also `issue #428
<https://github.com/python-attrs/attrs/issues/428>`_.
getstate_setstate (bool | None):
.. note::
This is usually only interesting for slotted classes and you
should probably just set *auto_detect* to True.
If True, ``__getstate__`` and ``__setstate__`` are generated and
attached to the class. This is necessary for slotted classes to be
pickleable. If left None, it's True by default for slotted classes
and False for dict classes.
If *auto_detect* is True, and *getstate_setstate* is left None, and
**either** ``__getstate__`` or ``__setstate__`` is detected
directly on the class (meaning: not inherited), it is set to False
(this is usually what you want).
auto_attribs (bool | None):
If True, look at type annotations to determine which attributes to
use, like `dataclasses`. If False, it will only look for explicit
:func:`field` class attributes, like classic *attrs*.
If left None, it will guess:
1. If any attributes are annotated and no unannotated
`attrs.field`\ s are found, it assumes *auto_attribs=True*.
2. Otherwise it assumes *auto_attribs=False* and tries to collect
`attrs.field`\ s.
If *attrs* decides to look at type annotations, **all** fields
**must** be annotated. If *attrs* encounters a field that is set to
a :func:`field` / `attr.ib` but lacks a type annotation, an
`attrs.exceptions.UnannotatedAttributeError` is raised. Use
``field_name: typing.Any = field(...)`` if you don't want to set a
type.
.. warning::
For features that use the attribute name to create decorators
(for example, :ref:`validators <validators>`), you still *must*
assign :func:`field` / `attr.ib` to them. Otherwise Python will
either not find the name or try to use the default value to
call, for example, ``validator`` on it.
Attributes annotated as `typing.ClassVar`, and attributes that are
neither annotated nor set to a `field()` are **ignored**.
these (dict[str, object]):
A dictionary of name to the (private) return value of `field()`
mappings. This is useful to avoid the definition of your attributes
within the class body because you can't (for example, if you want
to add ``__repr__`` methods to Django models) or don't want to.
If *these* is not `None`, *attrs* will *not* search the class body
for attributes and will *not* remove any attributes from it.
The order is deduced from the order of the attributes inside
*these*.
Arguably, this is a rather obscure feature.
.. versionadded:: 20.1.0
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
.. versionadded:: 22.2.0
*unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
.. versionchanged:: 24.1.0
Instances are not compared as tuples of attributes anymore, but using a
big ``and`` condition. This is faster and has more correct behavior for
uncomparable values like `math.nan`.
.. versionadded:: 24.1.0
If a class has an *inherited* classmethod called
``__attrs_init_subclass__``, it is executed after the class is created.
.. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
.. note::
The main differences to the classic `attr.s` are:
- Automatically detect whether or not *auto_attribs* should be `True`
(c.f. *auto_attribs* parameter).
- Converters and validators run when attributes are set by default --
if *frozen* is `False`.
- *slots=True*
Usually, this has only upsides and few visible effects in everyday
programming. But it *can* lead to some surprising behaviors, so
please make sure to read :term:`slotted classes`.
- *auto_exc=True*
- *auto_detect=True*
- *order=False*
- Some options that were only relevant on Python 2 or were kept around
for backwards-compatibility have been removed.
"""
def do_it(cls, auto_attribs):
return attrs(
maybe_cls=cls,
these=these,
repr=repr,
hash=hash,
unsafe_hash=unsafe_hash,
init=init,
slots=slots,
frozen=frozen,
weakref_slot=weakref_slot,
str=str,
auto_attribs=auto_attribs,
kw_only=kw_only,
cache_hash=cache_hash,
auto_exc=auto_exc,
eq=eq,
order=order,
auto_detect=auto_detect,
collect_by_mro=True,
getstate_setstate=getstate_setstate,
on_setattr=on_setattr,
field_transformer=field_transformer,
match_args=match_args,
)
def wrap(cls):
"""
Making this a wrapper ensures this code runs during class creation.
We also ensure that frozen-ness of classes is inherited.
"""
nonlocal frozen, on_setattr
had_on_setattr = on_setattr not in (None, setters.NO_OP)
# By default, mutable classes convert & validate on setattr.
if frozen is False and on_setattr is None:
on_setattr = _DEFAULT_ON_SETATTR
# However, if we subclass a frozen class, we inherit the immutability
# and disable on_setattr.
for base_cls in cls.__bases__:
if base_cls.__setattr__ is _frozen_setattrs:
if had_on_setattr:
msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
raise ValueError(msg)
on_setattr = setters.NO_OP
break
if auto_attribs is not None:
return do_it(cls, auto_attribs)
try:
return do_it(cls, True)
except UnannotatedAttributeError:
return do_it(cls, False)
# maybe_cls's type depends on the usage of the decorator. It's a class
# if it's used as `@attrs` but `None` if used as `@attrs()`.
if maybe_cls is None:
return wrap
return wrap(maybe_cls)
mutable = define
frozen = partial(define, frozen=True, on_setattr=None)
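# Illustrative sketch, not part of the vendored file: the modern API built
# from the pieces above -- define/mutable/frozen plus field() (names below
# are made up).
import attrs

@attrs.frozen
class User:
    email: str = attrs.field(validator=attrs.validators.instance_of(str))
    active: bool = True

u = User("a@example.com")
# u.active = False  # would raise attrs.exceptions.FrozenInstanceError
assert attrs.evolve(u, active=False).active is False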
def field(
*,
default=NOTHING,
validator=None,
repr=True,
hash=None,
init=True,
metadata=None,
type=None,
converter=None,
factory=None,
kw_only=False,
eq=None,
order=None,
on_setattr=None,
alias=None,
):
"""
Create a new :term:`field` / :term:`attribute` on a class.
.. warning::
Does **nothing** unless the class is also decorated with
`attrs.define` (or similar)!
Args:
default:
A value that is used if an *attrs*-generated ``__init__`` is used
and no value is passed while instantiating or the attribute is
excluded using ``init=False``.
If the value is an instance of `attrs.Factory`, its callable will
be used to construct a new value (useful for mutable data types
like lists or dicts).
If a default is not set (or set manually to `attrs.NOTHING`), a
value *must* be supplied when instantiating; otherwise a
`TypeError` will be raised.
.. seealso:: `defaults`
factory (~typing.Callable):
Syntactic sugar for ``default=attr.Factory(factory)``.
validator (~typing.Callable | list[~typing.Callable]):
Callable that is called by *attrs*-generated ``__init__`` methods
after the instance has been initialized. They receive the
initialized instance, the :func:`~attrs.Attribute`, and the passed
value.
The return value is *not* inspected so the validator has to throw
an exception itself.
If a `list` is passed, its items are treated as validators and must
all pass.
Validators can be globally disabled and re-enabled using
`attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
The validator can also be set using decorator notation as shown
below.
.. seealso:: :ref:`validators`
repr (bool | ~typing.Callable):
Include this attribute in the generated ``__repr__`` method. If
True, include the attribute; if False, omit it. By default, the
built-in ``repr()`` function is used. To override how the attribute
value is formatted, pass a ``callable`` that takes a single value
and returns a string. Note that the resulting string is used as-is,
which means it will be used directly *instead* of calling
``repr()`` (the default).
eq (bool | ~typing.Callable):
If True (default), include this attribute in the generated
``__eq__`` and ``__ne__`` methods that check two instances for
equality. To override how the attribute value is compared, pass a
callable that takes a single value and returns the value to be
compared.
.. seealso:: `comparison`
order (bool | ~typing.Callable):
If True (default), include this attribute in the generated
``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
override how the attribute value is ordered, pass a callable that
takes a single value and returns the value to be ordered.
.. seealso:: `comparison`
cmp (bool | ~typing.Callable):
Setting *cmp* is equivalent to setting *eq* and *order* to the same
value. Must not be mixed with *eq* or *order*.
.. seealso:: `comparison`
hash (bool | None):
Include this attribute in the generated ``__hash__`` method. If
None (default), mirror *eq*'s value. This is the correct behavior
according to the Python spec. Setting this value to anything other
than None is *discouraged*.
.. seealso:: `hashing`
init (bool):
Include this attribute in the generated ``__init__`` method.
It is possible to set this to False and set a default value. In
that case this attribute is unconditionally initialized with the
specified default value or factory.
.. seealso:: `init`
converter (typing.Callable | Converter):
A callable that is called by *attrs*-generated ``__init__`` methods
to convert the attribute's value to the desired format.
If a vanilla callable is passed, it is given the passed-in value as
the only positional argument. It is possible to receive additional
arguments by wrapping the callable in a `Converter`.
Either way, the returned value will be used as the new value of the
attribute. The value is converted before being passed to the
validator, if any.
.. seealso:: :ref:`converters`
metadata (dict | None):
An arbitrary mapping, to be used by third-party code.
.. seealso:: `extending-metadata`.
type (type):
The type of the attribute. Nowadays, the preferred method to
specify the type is using a variable annotation (see :pep:`526`).
This argument is provided for backwards-compatibility and for usage
with `make_class`. Regardless of the approach used, the type will
be stored on ``Attribute.type``.
Please note that *attrs* doesn't do anything with this metadata by
itself. You can use it as part of your own code or for `static type
checking <types>`.
kw_only (bool):
Make this attribute keyword-only in the generated ``__init__`` (if
``init`` is False, this parameter is ignored).
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
Allows overriding the *on_setattr* setting from `attr.s`. If left
None, the *on_setattr* value from `attr.s` is used. Set to
`attrs.setters.NO_OP` to run **no** `setattr` hooks for this
attribute -- regardless of the setting in `define()`.
alias (str | None):
Override this attribute's parameter name in the generated
``__init__`` method. If left None, it defaults to ``name`` stripped
of leading underscores. See `private-attributes`.
.. versionadded:: 20.1.0
.. versionchanged:: 21.1.0
*eq*, *order*, and *cmp* also accept a custom callable
.. versionadded:: 22.2.0 *alias*
.. versionadded:: 23.1.0
The *type* parameter has been re-added; mostly for `attrs.make_class`.
Please note that type checkers ignore this metadata.
.. seealso::
`attr.ib`
"""
return attrib(
default=default,
validator=validator,
repr=repr,
hash=hash,
init=init,
metadata=metadata,
type=type,
converter=converter,
factory=factory,
kw_only=kw_only,
eq=eq,
order=order,
on_setattr=on_setattr,
alias=alias,
)
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
"""
Same as `attr.asdict`, except that collection types are always retained
and dict is always used as *dict_factory*.
.. versionadded:: 21.3.0
"""
return _asdict(
inst=inst,
recurse=recurse,
filter=filter,
value_serializer=value_serializer,
retain_collection_types=True,
)
def astuple(inst, *, recurse=True, filter=None):
"""
Same as `attr.astuple`, except that collection types are always retained
and `tuple` is always used as the *tuple_factory*.
.. versionadded:: 21.3.0
"""
return _astuple(
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
)
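# Illustrative sketch, not part of the vendored file: attrs.asdict (above)
# keeps collection types, while the classic attr.asdict converts them to
# lists (the class below is made up).
import attr
import attrs

@attrs.define
class Box:
    tags: tuple = ("a", "b")

assert attrs.asdict(Box())["tags"] == ("a", "b")   # retained as a tuple
assert attr.asdict(Box())["tags"] == ["a", "b"]    # classic API: list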

View File

@ -0,0 +1,15 @@
from typing import Any, ClassVar, Protocol
# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
MYPY = False
if MYPY:
# A protocol to be able to statically accept an attrs class.
class AttrsInstance_(Protocol):
__attrs_attrs__: ClassVar[Any]
else:
# For type checkers without plug-in support use an empty protocol that
# will (hopefully) be combined into a union.
class AttrsInstance_(Protocol):
pass

View File

@ -0,0 +1,86 @@
# SPDX-License-Identifier: MIT
from functools import total_ordering
from ._funcs import astuple
from ._make import attrib, attrs
@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo:
"""
A version object that can be compared to a tuple of length 1--4:
>>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
True
>>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
True
>>> vi = attr.VersionInfo(19, 2, 0, "final")
>>> vi < (19, 1, 1)
False
>>> vi < (19,)
False
>>> vi == (19, 2,)
True
>>> vi == (19, 2, 1)
False
.. versionadded:: 19.2
"""
year = attrib(type=int)
minor = attrib(type=int)
micro = attrib(type=int)
releaselevel = attrib(type=str)
@classmethod
def _from_version_string(cls, s):
"""
Parse *s* and return a `VersionInfo`.
"""
v = s.split(".")
if len(v) == 3:
v.append("final")
return cls(
year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
)
def _ensure_tuple(self, other):
"""
Ensure *other* is a tuple of a valid length.
Returns a possibly transformed *other* and ourselves as a tuple of
the same length as *other*.
"""
if self.__class__ is other.__class__:
other = astuple(other)
if not isinstance(other, tuple):
raise NotImplementedError
if not (1 <= len(other) <= 4):
raise NotImplementedError
return astuple(self)[: len(other)], other
def __eq__(self, other):
try:
us, them = self._ensure_tuple(other)
except NotImplementedError:
return NotImplemented
return us == them
def __lt__(self, other):
try:
us, them = self._ensure_tuple(other)
except NotImplementedError:
return NotImplemented
# Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
# have to do anything special with releaselevel for now.
return us < them

View File

@ -0,0 +1,9 @@
class VersionInfo:
@property
def year(self) -> int: ...
@property
def minor(self) -> int: ...
@property
def micro(self) -> int: ...
@property
def releaselevel(self) -> str: ...

View File

@ -0,0 +1,151 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful converters.
"""
import typing
from ._compat import _AnnotationExtractor
from ._make import NOTHING, Factory, pipe
__all__ = [
"default_if_none",
"optional",
"pipe",
"to_bool",
]
def optional(converter):
"""
A converter that allows an attribute to be optional. An optional attribute
is one which can be set to `None`.
Type annotations will be inferred from the wrapped converter's
annotations, if it has any.
Args:
converter (typing.Callable):
the converter that is used for non-`None` values.
.. versionadded:: 17.1.0
"""
def optional_converter(val):
if val is None:
return None
return converter(val)
xtr = _AnnotationExtractor(converter)
t = xtr.get_first_param_type()
if t:
optional_converter.__annotations__["val"] = typing.Optional[t]
rt = xtr.get_return_type()
if rt:
optional_converter.__annotations__["return"] = typing.Optional[rt]
return optional_converter
def default_if_none(default=NOTHING, factory=None):
"""
A converter that allows replacing `None` values with *default* or the result
of *factory*.
Args:
default:
Value to be used if `None` is passed. Passing an instance of
`attrs.Factory` is supported, however the ``takes_self`` option is
*not*.
factory (typing.Callable):
A callable that takes no parameters whose result is used if `None`
is passed.
Raises:
TypeError: If **neither** *default* nor *factory* is passed.
TypeError: If **both** *default* and *factory* are passed.
ValueError:
If an instance of `attrs.Factory` is passed with
``takes_self=True``.
.. versionadded:: 18.2.0
"""
if default is NOTHING and factory is None:
msg = "Must pass either `default` or `factory`."
raise TypeError(msg)
if default is not NOTHING and factory is not None:
msg = "Must pass either `default` or `factory` but not both."
raise TypeError(msg)
if factory is not None:
default = Factory(factory)
if isinstance(default, Factory):
if default.takes_self:
msg = "`takes_self` is not supported by default_if_none."
raise ValueError(msg)
def default_if_none_converter(val):
if val is not None:
return val
return default.factory()
else:
def default_if_none_converter(val):
if val is not None:
return val
return default
return default_if_none_converter
def to_bool(val):
"""
Convert "boolean" strings (for example, from environment variables) to real
booleans.
Values mapping to `True`:
- ``True``
- ``"true"`` / ``"t"``
- ``"yes"`` / ``"y"``
- ``"on"``
- ``"1"``
- ``1``
Values mapping to `False`:
- ``False``
- ``"false"`` / ``"f"``
- ``"no"`` / ``"n"``
- ``"off"``
- ``"0"``
- ``0``
Raises:
ValueError: For any other value.
.. versionadded:: 21.3.0
"""
if isinstance(val, str):
val = val.lower()
if val in (True, "true", "t", "yes", "y", "on", "1", 1):
return True
if val in (False, "false", "f", "no", "n", "off", "0", 0):
return False
msg = f"Cannot convert value to bool: {val!r}"
raise ValueError(msg)
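# Illustrative sketch, not part of the vendored file: the three converters
# above wired into field definitions (class and field names are made up).
import attrs
from attr import converters

@attrs.define
class Settings:
    retries: int = attrs.field(converter=converters.optional(int), default=None)
    debug: bool = attrs.field(converter=converters.to_bool, default="off")
    name: str = attrs.field(
        converter=converters.default_if_none("anonymous"), default=None
    )

s = Settings(retries="3", debug="yes")
assert (s.retries, s.debug, s.name) == (3, True, "anonymous")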

View File

@ -0,0 +1,13 @@
from typing import Callable, TypeVar, overload
from attrs import _ConverterType
_T = TypeVar("_T")
def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...

View File

@ -0,0 +1,95 @@
# SPDX-License-Identifier: MIT
from __future__ import annotations
from typing import ClassVar
class FrozenError(AttributeError):
"""
An attempt was made to modify a frozen/immutable instance or attribute.
It mirrors the behavior of ``namedtuples`` by using the same error message
and subclassing `AttributeError`.
.. versionadded:: 20.1.0
"""
msg = "can't set attribute"
args: ClassVar[tuple[str]] = (msg,)
class FrozenInstanceError(FrozenError):
"""
An attempt was made to modify a frozen instance.
.. versionadded:: 16.1.0
"""
class FrozenAttributeError(FrozenError):
"""
An attempt was made to modify a frozen attribute.
.. versionadded:: 20.1.0
"""
class AttrsAttributeNotFoundError(ValueError):
"""
An *attrs* function couldn't find an attribute that the user asked for.
.. versionadded:: 16.2.0
"""
class NotAnAttrsClassError(ValueError):
"""
A non-*attrs* class has been passed into an *attrs* function.
.. versionadded:: 16.2.0
"""
class DefaultAlreadySetError(RuntimeError):
"""
A default has been set when defining the field, and a decorator then attempted to reset it.
.. versionadded:: 17.1.0
"""
class UnannotatedAttributeError(RuntimeError):
"""
A class with ``auto_attribs=True`` has a field without a type annotation.
.. versionadded:: 17.3.0
"""
class PythonTooOldError(RuntimeError):
"""
An *attrs* feature was used that requires a newer Python version.
.. versionadded:: 18.2.0
"""
class NotCallableError(TypeError):
"""
A field requiring a callable has been set with a value that is not
callable.
.. versionadded:: 19.2.0
"""
def __init__(self, msg, value):
super(TypeError, self).__init__(msg, value)
self.msg = msg
self.value = value
def __str__(self):
return str(self.msg)

View File

@ -0,0 +1,17 @@
from typing import Any
class FrozenError(AttributeError):
msg: str = ...
class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...
class NotCallableError(TypeError):
msg: str = ...
value: Any = ...
def __init__(self, msg: str, value: Any) -> None: ...

View File

@ -0,0 +1,72 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
"""
from ._make import Attribute
def _split_what(what):
"""
Returns a tuple of `frozenset`s of classes and attributes.
"""
return (
frozenset(cls for cls in what if isinstance(cls, type)),
frozenset(cls for cls in what if isinstance(cls, str)),
frozenset(cls for cls in what if isinstance(cls, Attribute)),
)
def include(*what):
"""
Create a filter that only allows *what*.
Args:
what (list[type, str, attrs.Attribute]):
What to include. Can be a type, a name, or an attribute.
Returns:
Callable:
A callable that can be passed to `attrs.asdict`'s and
`attrs.astuple`'s *filter* argument.
.. versionchanged:: 23.1.0 Accept strings with field names.
"""
cls, names, attrs = _split_what(what)
def include_(attribute, value):
return (
value.__class__ in cls
or attribute.name in names
or attribute in attrs
)
return include_
def exclude(*what):
"""
Create a filter that does **not** allow *what*.
Args:
what (list[type, str, attrs.Attribute]):
What to exclude. Can be a type, a name, or an attribute.
Returns:
Callable:
A callable that can be passed to `attrs.asdict`'s and
`attrs.astuple`'s *filter* argument.
.. versionchanged:: 23.3.0 Accept field name string as input argument
"""
cls, names, attrs = _split_what(what)
def exclude_(attribute, value):
return not (
value.__class__ in cls
or attribute.name in names
or attribute in attrs
)
return exclude_
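# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_filters` is hypothetical.
def _example_filters():
    import attrs

    @attrs.define
    class User:
        name = attrs.field()
        password = attrs.field()
        age = attrs.field()

    u = User("ada", "hunter2", 36)
    # Drop a field by name ...
    assert attrs.asdict(u, filter=exclude("password")) == {"name": "ada", "age": 36}
    # ... or keep only values of a given type.
    assert attrs.asdict(u, filter=include(int)) == {"age": 36}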

View File

@ -0,0 +1,6 @@
from typing import Any
from . import Attribute, _FilterType
def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...

View File

@ -0,0 +1,79 @@
# SPDX-License-Identifier: MIT
"""
Commonly used hooks for on_setattr.
"""
from . import _config
from .exceptions import FrozenAttributeError
def pipe(*setters):
"""
Run all *setters* and return the return value of the last one.
.. versionadded:: 20.1.0
"""
def wrapped_pipe(instance, attrib, new_value):
rv = new_value
for setter in setters:
rv = setter(instance, attrib, rv)
return rv
return wrapped_pipe
def frozen(_, __, ___):
"""
Prevent an attribute from being modified.
.. versionadded:: 20.1.0
"""
raise FrozenAttributeError()
def validate(instance, attrib, new_value):
"""
Run *attrib*'s validator on *new_value* if it has one.
.. versionadded:: 20.1.0
"""
if _config._run_validators is False:
return new_value
v = attrib.validator
if not v:
return new_value
v(instance, attrib, new_value)
return new_value
def convert(instance, attrib, new_value):
"""
Run *attrib*'s converter -- if it has one -- on *new_value* and return the
result.
.. versionadded:: 20.1.0
"""
c = attrib.converter
if c:
# This can be removed once we drop 3.8 and use attrs.Converter instead.
from ._make import Converter
if not isinstance(c, Converter):
return c(new_value)
return c(new_value, instance, attrib)
return new_value
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
# Sphinx's autodata stopped working, so the docstring is inlined in the API
# docs.
NO_OP = object()
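# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_on_setattr` is hypothetical.
def _example_on_setattr():
    import attrs

    # Run the converter first, then the validator, on every attribute assignment.
    @attrs.define(on_setattr=pipe(convert, validate))
    class Port:
        number = attrs.field(converter=int, validator=attrs.validators.ge(0))

    p = Port("8080")
    p.number = "9090"  # converted by int(), then checked by ge(0)
    assert p.number == 9090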

View File

@ -0,0 +1,20 @@
from typing import Any, NewType, NoReturn, TypeVar
from . import Attribute
from attrs import _OnSetAttrType
_T = TypeVar("_T")
def frozen(
instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
# convert is allowed to return Any, because converters can be chained using pipe.
def convert(
instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...
_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType

View File

@ -0,0 +1,711 @@
# SPDX-License-Identifier: MIT
"""
Commonly useful validators.
"""
import operator
import re
from contextlib import contextmanager
from re import Pattern
from ._config import get_run_validators, set_run_validators
from ._make import _AndValidator, and_, attrib, attrs
from .converters import default_if_none
from .exceptions import NotCallableError
__all__ = [
"and_",
"deep_iterable",
"deep_mapping",
"disabled",
"ge",
"get_disabled",
"gt",
"in_",
"instance_of",
"is_callable",
"le",
"lt",
"matches_re",
"max_len",
"min_len",
"not_",
"optional",
"or_",
"set_disabled",
]
def set_disabled(disabled):
"""
Globally disable or enable running validators.
By default, they are run.
Args:
disabled (bool): If `True`, disable running all validators.
.. warning::
This function is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(not disabled)
def get_disabled():
"""
Return a bool indicating whether validators are currently disabled or not.
Returns:
bool: `True` if validators are currently disabled.
.. versionadded:: 21.3.0
"""
return not get_run_validators()
@contextmanager
def disabled():
"""
Context manager that disables running validators within its context.
.. warning::
This context manager is not thread-safe!
.. versionadded:: 21.3.0
"""
set_run_validators(False)
try:
yield
finally:
set_run_validators(True)
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InstanceOfValidator:
type = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not isinstance(value, self.type):
msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
raise TypeError(
msg,
attr,
self.type,
value,
)
def __repr__(self):
return f"<instance_of validator for type {self.type!r}>"
def instance_of(type):
"""
A validator that raises a `TypeError` if the initializer is called with a
wrong type for this particular attribute (checks are performed using
`isinstance`, so it's also valid to pass a tuple of types).
Args:
type (type | tuple[type]): The type to check for.
Raises:
TypeError:
With a human readable error message, the attribute (of type
`attrs.Attribute`), the expected type, and the value it got.
"""
return _InstanceOfValidator(type)
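# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_instance_of` is hypothetical.
def _example_instance_of():
    import attrs

    @attrs.define
    class Point:
        x = attrs.field(validator=instance_of(int))
        y = attrs.field(validator=instance_of((int, float)))  # tuple of types

    Point(1, 2.5)
    try:
        Point("1", 2.5)
    except TypeError:
        pass  # the wrong type for `x` is rejected in __init__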
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator:
pattern = attrib()
match_func = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.match_func(value):
msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
raise ValueError(
msg,
attr,
self.pattern,
value,
)
def __repr__(self):
return f"<matches_re validator for pattern {self.pattern!r}>"
def matches_re(regex, flags=0, func=None):
r"""
A validator that raises `ValueError` if the initializer is called with a
string that doesn't match *regex*.
Args:
regex (str, re.Pattern):
A regex string or precompiled pattern to match against.
flags (int):
Flags that will be passed to the underlying re function (default 0)
func (typing.Callable):
Which underlying `re` function to call. Valid options are
`re.fullmatch`, `re.search`, and `re.match`; the default `None`
means `re.fullmatch`. For performance reasons, the pattern is
always precompiled using `re.compile`.
.. versionadded:: 19.2.0
.. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
"""
valid_funcs = (re.fullmatch, None, re.search, re.match)
if func not in valid_funcs:
msg = "'func' must be one of {}.".format(
", ".join(
sorted(e and e.__name__ or "None" for e in set(valid_funcs))
)
)
raise ValueError(msg)
if isinstance(regex, Pattern):
if flags:
msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
raise TypeError(msg)
pattern = regex
else:
pattern = re.compile(regex, flags)
if func is re.match:
match_func = pattern.match
elif func is re.search:
match_func = pattern.search
else:
match_func = pattern.fullmatch
return _MatchesReValidator(pattern, match_func)
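# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_matches_re` is hypothetical.
def _example_matches_re():
    import re

    import attrs

    @attrs.define
    class Account:
        # The default mode is a full match against the whole string.
        email = attrs.field(validator=matches_re(r"[^@]+@[^@]+"))
        # With func=re.search a substring hit is enough.
        note = attrs.field(validator=matches_re("beta", func=re.search), default="beta build")

    Account("a@b.example")
    try:
        Account("not-an-email")
    except ValueError:
        pass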
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OptionalValidator:
validator = attrib()
def __call__(self, inst, attr, value):
if value is None:
return
self.validator(inst, attr, value)
def __repr__(self):
return f"<optional validator for {self.validator!r} or None>"
def optional(validator):
"""
A validator that makes an attribute optional. An optional attribute is one
which can be set to `None` in addition to satisfying the requirements of
the sub-validator.
Args:
validator
(typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
A validator (or validators) that is used for non-`None` values.
.. versionadded:: 15.1.0
.. versionchanged:: 17.1.0 *validator* can be a list of validators.
.. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
"""
if isinstance(validator, (list, tuple)):
return _OptionalValidator(_AndValidator(validator))
return _OptionalValidator(validator)
@attrs(repr=False, slots=True, unsafe_hash=True)
class _InValidator:
options = attrib()
_original_options = attrib(hash=False)
def __call__(self, inst, attr, value):
try:
in_options = value in self.options
except TypeError: # e.g. `1 in "abc"`
in_options = False
if not in_options:
msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
raise ValueError(
msg,
attr,
self._original_options,
value,
)
def __repr__(self):
return f"<in_ validator with options {self._original_options!r}>"
def in_(options):
"""
A validator that raises a `ValueError` if the initializer is called with a
value that does not belong in the *options* provided.
The check is performed using ``value in options``, so *options* has to
support that operation.
To keep the validator hashable, dicts, lists, and sets are transparently
transformed into a `tuple`.
Args:
options: Allowed options.
Raises:
ValueError:
With a human readable error message, the attribute (of type
`attrs.Attribute`), the expected options, and the value it got.
.. versionadded:: 17.1.0
.. versionchanged:: 22.1.0
The ValueError was incomplete until now and only contained the human
readable error message. Now it contains all the information that has
been promised since 17.1.0.
.. versionchanged:: 24.1.0
*options* that are a list, dict, or a set are now transformed into a
tuple to keep the validator hashable.
"""
repr_options = options
if isinstance(options, (list, dict, set)):
options = tuple(options)
return _InValidator(options, repr_options)
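# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_in_` is hypothetical.
def _example_in_():
    import attrs

    @attrs.define
    class Job:
        # The list is turned into a tuple internally to keep the validator hashable.
        state = attrs.field(validator=in_(["queued", "running", "done"]))

    Job("running")
    try:
        Job("paused")
    except ValueError:
        pass  # values outside the allowed options are rejected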
@attrs(repr=False, slots=False, unsafe_hash=True)
class _IsCallableValidator:
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not callable(value):
message = (
"'{name}' must be callable "
"(got {value!r} that is a {actual!r})."
)
raise NotCallableError(
msg=message.format(
name=attr.name, value=value, actual=value.__class__
),
value=value,
)
def __repr__(self):
return "<is_callable validator>"
def is_callable():
"""
A validator that raises a `attrs.exceptions.NotCallableError` if the
initializer is called with a value for this particular attribute that is
not callable.
.. versionadded:: 19.1.0
Raises:
attrs.exceptions.NotCallableError:
With a human readable error message containing the attribute
(`attrs.Attribute`) name, and the value it got.
"""
return _IsCallableValidator()
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepIterable:
member_validator = attrib(validator=is_callable())
iterable_validator = attrib(
default=None, validator=optional(is_callable())
)
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if self.iterable_validator is not None:
self.iterable_validator(inst, attr, value)
for member in value:
self.member_validator(inst, attr, member)
def __repr__(self):
iterable_identifier = (
""
if self.iterable_validator is None
else f" {self.iterable_validator!r}"
)
return (
f"<deep_iterable validator for{iterable_identifier}"
f" iterables of {self.member_validator!r}>"
)
def deep_iterable(member_validator, iterable_validator=None):
"""
A validator that performs deep validation of an iterable.
Args:
member_validator: Validator to apply to iterable members.
iterable_validator:
Validator to apply to iterable itself (optional).
Raises:
TypeError: If any sub-validator fails.
.. versionadded:: 19.1.0
"""
if isinstance(member_validator, (list, tuple)):
member_validator = and_(*member_validator)
return _DeepIterable(member_validator, iterable_validator)
@attrs(repr=False, slots=True, unsafe_hash=True)
class _DeepMapping:
key_validator = attrib(validator=is_callable())
value_validator = attrib(validator=is_callable())
mapping_validator = attrib(default=None, validator=optional(is_callable()))
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if self.mapping_validator is not None:
self.mapping_validator(inst, attr, value)
for key in value:
self.key_validator(inst, attr, key)
self.value_validator(inst, attr, value[key])
def __repr__(self):
return f"<deep_mapping validator for objects mapping {self.key_validator!r} to {self.value_validator!r}>"
def deep_mapping(key_validator, value_validator, mapping_validator=None):
"""
A validator that performs deep validation of a dictionary.
Args:
key_validator: Validator to apply to dictionary keys.
value_validator: Validator to apply to dictionary values.
mapping_validator:
Validator to apply to top-level mapping attribute (optional).
.. versionadded:: 19.1.0
Raises:
TypeError: If any sub-validator fails.
"""
return _DeepMapping(key_validator, value_validator, mapping_validator)
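# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_deep_validation` is hypothetical.
def _example_deep_validation():
    import attrs

    @attrs.define
    class Matrix:
        # A list of lists of ints, validated at every level.
        rows = attrs.field(
            validator=deep_iterable(
                member_validator=deep_iterable(instance_of(int), instance_of(list)),
                iterable_validator=instance_of(list),
            )
        )
        # Keys must be strings, values ints.
        labels = attrs.field(
            validator=deep_mapping(instance_of(str), instance_of(int)),
            factory=dict,
        )

    Matrix([[1, 2], [3, 4]], {"rows": 2})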
@attrs(repr=False, frozen=True, slots=True)
class _NumberValidator:
bound = attrib()
compare_op = attrib()
compare_func = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not self.compare_func(value, self.bound):
msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
raise ValueError(msg)
def __repr__(self):
return f"<Validator for x {self.compare_op} {self.bound}>"
def lt(val):
"""
A validator that raises `ValueError` if the initializer is called with a
number larger than or equal to *val*.
The validator uses `operator.lt` to compare the values.
Args:
val: Exclusive upper bound for values.
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<", operator.lt)
def le(val):
"""
A validator that raises `ValueError` if the initializer is called with a
number greater than *val*.
The validator uses `operator.le` to compare the values.
Args:
val: Inclusive upper bound for values.
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, "<=", operator.le)
def ge(val):
"""
A validator that raises `ValueError` if the initializer is called with a
number smaller than *val*.
The validator uses `operator.ge` to compare the values.
Args:
val: Inclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">=", operator.ge)
def gt(val):
"""
A validator that raises `ValueError` if the initializer is called with a
number smaller than or equal to *val*.
The validator uses `operator.gt` to compare the values.
Args:
val: Exclusive lower bound for values
.. versionadded:: 21.3.0
"""
return _NumberValidator(val, ">", operator.gt)
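# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_bounds` is hypothetical.
def _example_bounds():
    import attrs

    @attrs.define
    class Probability:
        # A list of validators is combined with and_() by attrs.field.
        value = attrs.field(validator=[ge(0), le(1)])

    Probability(0.5)
    try:
        Probability(1.5)
    except ValueError:
        pass  # violates the inclusive upper bound of 1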
@attrs(repr=False, frozen=True, slots=True)
class _MaxLengthValidator:
max_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) > self.max_length:
msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
raise ValueError(msg)
def __repr__(self):
return f"<max_len validator for {self.max_length}>"
def max_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is longer than *length*.
Args:
length (int): Maximum length of the string or iterable
.. versionadded:: 21.3.0
"""
return _MaxLengthValidator(length)
@attrs(repr=False, frozen=True, slots=True)
class _MinLengthValidator:
min_length = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if len(value) < self.min_length:
msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
raise ValueError(msg)
def __repr__(self):
return f"<min_len validator for {self.min_length}>"
def min_len(length):
"""
A validator that raises `ValueError` if the initializer is called
with a string or iterable that is shorter than *length*.
Args:
length (int): Minimum length of the string or iterable
.. versionadded:: 22.1.0
"""
return _MinLengthValidator(length)
@attrs(repr=False, slots=True, unsafe_hash=True)
class _SubclassOfValidator:
type = attrib()
def __call__(self, inst, attr, value):
"""
We use a callable class to be able to change the ``__repr__``.
"""
if not issubclass(value, self.type):
msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
raise TypeError(
msg,
attr,
self.type,
value,
)
def __repr__(self):
return f"<subclass_of validator for type {self.type!r}>"
def _subclass_of(type):
"""
A validator that raises a `TypeError` if the initializer is called with a
wrong type for this particular attribute (checks are performed using
`issubclass`, so it's also valid to pass a tuple of types).
Args:
type (type | tuple[type, ...]): The type(s) to check for.
Raises:
TypeError:
With a human readable error message, the attribute (of type
`attrs.Attribute`), the expected type, and the value it got.
"""
return _SubclassOfValidator(type)
@attrs(repr=False, slots=True, unsafe_hash=True)
class _NotValidator:
validator = attrib()
msg = attrib(
converter=default_if_none(
"not_ validator child '{validator!r}' "
"did not raise a captured error"
)
)
exc_types = attrib(
validator=deep_iterable(
member_validator=_subclass_of(Exception),
iterable_validator=instance_of(tuple),
),
)
def __call__(self, inst, attr, value):
try:
self.validator(inst, attr, value)
except self.exc_types:
pass # suppress error to invert validity
else:
raise ValueError(
self.msg.format(
validator=self.validator,
exc_types=self.exc_types,
),
attr,
self.validator,
value,
self.exc_types,
)
def __repr__(self):
return f"<not_ validator wrapping {self.validator!r}, capturing {self.exc_types!r}>"
def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
"""
A validator that wraps and logically 'inverts' the validator passed to it.
It will raise a `ValueError` if the provided validator *doesn't* raise a
`ValueError` or `TypeError` (by default), and will suppress the exception
if the provided validator *does*.
Intended to be used with existing validators to compose logic without
needing to create inverted variants, for example, ``not_(in_(...))``.
Args:
validator: A validator to be logically inverted.
msg (str):
Message to raise if validator fails. Formatted with keys
``exc_types`` and ``validator``.
exc_types (tuple[type, ...]):
Exception type(s) to capture. Other types raised by child
validators will not be intercepted and pass through.
Raises:
ValueError:
With a human readable error message, the attribute (of type
`attrs.Attribute`), the validator that failed to raise an
exception, the value it got, and the expected exception types.
.. versionadded:: 22.2.0
"""
try:
exc_types = tuple(exc_types)
except TypeError:
exc_types = (exc_types,)
return _NotValidator(validator, msg, exc_types)
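# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_not_` is hypothetical.
def _example_not_():
    import attrs

    @attrs.define
    class Handle:
        # Accept anything except an int: the wrapped validator must raise.
        value = attrs.field(validator=not_(instance_of(int)))

    Handle("abc")
    try:
        Handle(3)
    except ValueError:
        pass  # an int satisfies instance_of(int), so not_ rejects it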
@attrs(repr=False, slots=True, unsafe_hash=True)
class _OrValidator:
validators = attrib()
def __call__(self, inst, attr, value):
for v in self.validators:
try:
v(inst, attr, value)
except Exception: # noqa: BLE001, PERF203, S112
continue
else:
return
msg = f"None of {self.validators!r} satisfied for value {value!r}"
raise ValueError(msg)
def __repr__(self):
return f"<or validator wrapping {self.validators!r}>"
def or_(*validators):
"""
A validator that composes multiple validators into one.
When called on a value, it runs all wrapped validators until one of them is
satisfied.
Args:
validators (~collections.abc.Iterable[typing.Callable]):
Arbitrary number of validators.
Raises:
ValueError:
If no validator is satisfied. Raised with a human-readable error
message listing all the wrapped validators and the value that
failed all of them.
.. versionadded:: 24.1.0
"""
vals = []
for v in validators:
vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
return _OrValidator(tuple(vals))
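# Editor's note: illustrative sketch, not part of the upstream source; the
# helper name `_example_or_` is hypothetical.
def _example_or_():
    import attrs

    @attrs.define
    class Setting:
        # Either the literal "auto" or any integer is accepted.
        workers = attrs.field(validator=or_(in_(["auto"]), instance_of(int)))

    Setting("auto")
    Setting(4)
    try:
        Setting("four")
    except ValueError:
        pass  # neither alternative is satisfied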

View File

@ -0,0 +1,83 @@
from typing import (
Any,
AnyStr,
Callable,
Container,
ContextManager,
Iterable,
Mapping,
Match,
Pattern,
TypeVar,
overload,
)
from attrs import _ValidatorType
from attrs import _ValidatorArgType
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)
def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...
# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
type: tuple[type[_T1], type[_T2]]
) -> _ValidatorType[_T1 | _T2]: ...
@overload
def instance_of(
type: tuple[type[_T1], type[_T2], type[_T3]]
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
@overload
def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
def optional(
validator: (
_ValidatorType[_T]
| list[_ValidatorType[_T]]
| tuple[_ValidatorType[_T]]
),
) -> _ValidatorType[_T | None]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
regex: Pattern[AnyStr] | AnyStr,
flags: int = ...,
func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
member_validator: _ValidatorArgType[_T],
iterable_validator: _ValidatorType[_I] | None = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
key_validator: _ValidatorType[_K],
value_validator: _ValidatorType[_V],
mapping_validator: _ValidatorType[_M] | None = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
def not_(
validator: _ValidatorType[_T],
*,
msg: str | None = None,
exc_types: type[Exception] | Iterable[type[Exception]] = ...,
) -> _ValidatorType[_T]: ...
def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...