Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the enclosing method and its `for f in ...` loop
# header are not visible in this chunk; indentation was lost in extraction.
# Skip internal fields
# NOTE(review): the `not base_fields` guard looks suspicious — membership in
# `base_fields_types` is only tested when `base_fields` is falsy; confirm intent.
if f.name.startswith("__") or (not base_fields and (f.name, f.type) in base_fields_types):
continue
# Note fields() doesn't resolve forward refs
# Replace the (possibly stringified) annotation with the resolved type hint.
f.type = type_hints[f.name]
mapped_fields.append(JsonSchemaField(f, cls.field_mapping().get(f.name, f.name)))
# Optionally expose @property members as additional schema fields.
if cls.__serialise_properties:
include_properties = None
# A tuple value restricts serialisation to the named properties only;
# any other truthy value serialises every data descriptor.
if isinstance(cls.__serialise_properties, tuple):
include_properties = set(cls.__serialise_properties)
members = inspect.getmembers(cls, inspect.isdatadescriptor)
for name, member in members:
if name != "__weakref__" and (include_properties is None or name in include_properties):
# Build a synthetic dataclasses.Field for the property.
# NOTE(review): positional Field(...) arguments depend on the running
# Python version's Field.__init__ signature — fragile across releases.
f = Field(MISSING, None, None, None, None, None, None)
f.name = name
# The property's type comes from the getter's declared return annotation;
# a getter without a 'return' annotation raises KeyError here.
f.type = member.fget.__annotations__['return']
mapped_fields.append(JsonSchemaField(f, name, is_property=True))
return mapped_fields
def _get_field_schema(cls, field: Union[Field, Type], schema_options: SchemaOptions) -> Tuple[JsonDict, bool]:
"""Return a ``(schema, required)`` pair for a dataclass field or a bare type.

NOTE(review): this definition is truncated in this chunk — the trailing
``elif`` branch is cut off and further branches presumably follow.
"""
field_schema: JsonDict = {}
required = True
if isinstance(field, Field):
# Real dataclass field: per-field metadata may adjust requiredness.
field_type = field.type
field_meta, required = cls._get_field_meta(field, schema_options.schema_type)
else:
# Bare type (e.g. a container's element type): no per-field metadata.
field_type = field
field_meta = FieldMeta(schema_type=schema_options.schema_type)
field_type_name = cls._get_field_type_name(field_type)
if cls._is_json_schema_subclass(field_type):
# Nested schema class: emit a reference rather than inlining its schema.
field_schema = schema_reference(schema_options.schema_type, field_type_name)
else:
# If is optional[...]
if is_optional(field_type):
# Optional[X]: recurse on X, and the field becomes non-required.
field_schema = cls._get_field_schema(unwrap_optional(field_type), schema_options)[0]
required = False
elif is_nullable(field_type):
field_schema, required = cls._get_field_schema(unwrap_nullable(field_type), schema_options)
def set_typed_dict_fields(typed_dict_type: Type[Dict[str, Any]]) -> None:
    """Attach dataclass-style field metadata to a TypedDict-like class.

    For every annotated attribute, a ``dataclasses.Field`` entry is built
    (reusing any ``Field`` already declared as the class attribute, otherwise
    wrapping the attribute's value — or ``MISSING`` — as the default) and the
    resulting mapping is stored on ``__dataclass_fields__`` so the class can
    be treated like a dataclass by downstream introspection.
    """
    synthesized = {}
    for field_name, annotation in typed_dict_type.__annotations__.items():
        declared = getattr(typed_dict_type, field_name, dataclasses.MISSING)
        # Reuse an explicit Field declaration; otherwise wrap the plain
        # default value (possibly MISSING) in a fresh field descriptor.
        entry = declared if isinstance(declared, dataclasses.Field) else dataclasses.field(default=declared)
        entry.name = field_name
        entry.type = annotation
        # Mark it as a regular field so dataclasses.fields() would accept it.
        entry._field_type = dataclasses._FIELD  # type: ignore
        synthesized[field_name] = entry
    typed_dict_type.__dataclass_fields__ = synthesized  # type: ignore
default is the default value of the field. default_factory is a
0-argument function called to initialize a field's value. If init
is True, the field will be a parameter to the class's __init__()
function. If repr is True, the field will be included in the
object's repr(). If hash is True, the field will be included in
the object's hash(). If compare is True, the field will be used
in comparison functions. metadata, if specified, must be a
mapping which is stored but not otherwise examined by dataclass.
It is an error to specify both default and default_factory.
"""
# A field may carry either a static default or a factory, never both.
if default is not MISSING and default_factory is not MISSING:
raise ValueError('cannot specify both default and default_factory')
# NOTE(review): fragment — the opening `def field(...)` line is not visible in
# this chunk; the positional Field(...) arguments mirror that signature.
return Field(default, default_factory, init, repr, hash, compare,
metadata)
import dataclasses
import enum
import inspect
from enum import Enum
from typing import *
from typing import cast
from .. import docstring, utils
from ..utils import Dataclass, DataclassType
from ..helpers import dict_field
from .wrapper import Wrapper
from ..logging_utils import get_logger
# Module-level logger. NOTE(review): passes __file__ (a path) rather than the
# conventional __name__ — confirm get_logger expects a file path.
logger = get_logger(__file__)
class FieldWrapper(Wrapper[dataclasses.Field]):
"""
The FieldWrapper class acts a bit like an 'argparse.Action' class, which
essentially just creates the `option_strings` and `arg_options` that get
passed to the `add_argument(*option_strings, **arg_options)` function of the
`argparse._ArgumentGroup` (in this case represented by the `parent`
attribute, an instance of the class `DataclassWrapper`).
The `option_strings`, `required`, `help`, `metavar`, `default`, etc.
attributes just autogenerate the argument of the same name of the
above-mentioned `add_argument` function. The `arg_options` attribute fills
in the rest and may overwrite these values, depending on the type of field.
The `field` argument is the actually wrapped `dataclasses.Field` instance.
"""
# Whether or not `simple_parsing` should add option_string variants where
# NOTE(review): comment and class body truncated in this chunk.
>>> annotations = typic.annotations(Foo)
See Also
--------
:py:class:`ResolvedAnnotation`
"""
# NOTE(review): fragment — the def line and docstring opening are outside
# this chunk.
# Normalise instances to their type so signature/type-hint inspection works.
if not any(
(inspect.ismethod(obj), inspect.isfunction(obj), inspect.isclass(obj))
):
obj = type(obj)
sig = cached_signature(obj)
hints = cached_type_hints(obj)
params: Mapping[str, inspect.Parameter] = sig.parameters
fields: Mapping[str, dataclasses.Field] = {}
if dataclasses.is_dataclass(obj):
fields = {f.name: f for f in dataclasses.fields(obj)}
ann = {}
# Union of signature parameters and resolved hints covers names that
# appear in only one of the two sources.
for name in params.keys() | hints.keys():
param = params.get(name)
hint = hints.get(name)
field = fields.get(name)
# NOTE(review): `param.annotation` is unguarded — if `name` came from
# `hints` only, `param` is None here; the `type: ignore` suggests the
# authors rely on `hint` being truthy in that case. Confirm upstream.
annotation = hint or param.annotation # type: ignore
annotation = resolve_supertype(annotation)
# Synthesise a parameter for hint-only attributes.
param = param or inspect.Parameter(
name,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
default=_empty,
annotation=hint or annotation,
)
# NOTE(review): comparing repr(param.default) to "" looks like scrape
# corruption (an angle-bracketed repr was likely stripped); body truncated.
if repr(param.default) == "":
...
>>> protocols = typic.protocols(Foo)
See Also
--------
:py:class:`SerdeProtocol`
"""
# NOTE(review): fragment — the def line and docstring opening are outside
# this chunk.
# Normalise instances to their class before inspecting.
if not any(
(inspect.ismethod(obj), inspect.isfunction(obj), inspect.isclass(obj))
):
obj = obj.__class__
hints = util.cached_type_hints(obj)
params = util.safe_get_params(obj)
fields: Mapping[str, dataclasses.Field] = {}
if dataclasses.is_dataclass(obj):
fields = {f.name: f for f in dataclasses.fields(obj)}
ann = {}
# Consider every name known to either the signature or the type hints.
for name in params.keys() | hints.keys():
param = params.get(name)
hint = hints.get(name)
field = fields.get(name)
# NOTE(review): unguarded `param.annotation` — same caveat as the
# parallel `annotations` fragment above in the original project.
annotation = hint or param.annotation # type: ignore
annotation = util.resolve_supertype(annotation)
# Synthesise a parameter for hint-only attributes.
param = param or inspect.Parameter(
name,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
default=EMPTY,
annotation=hint or annotation,
)
# NOTE(review): the "" comparison looks scrape-corrupted; body truncated here.
if repr(param.default) == "":
# NOTE(review): fragment of a `from_dict`-style decoder — the enclosing
# function and the loop producing `subclass` are not visible in this chunk.
if issubclass(subclass, cls) and subclass is not cls:
derived_classes.append(subclass)
logger.debug(f"All serializable derived classes of {cls} available: {derived_classes}")
from itertools import chain
# All the arguments that the dataclass should be able to accept in
# its 'init'.
req_init_field_names = set(chain(extra_args, init_args))
# Sort the derived classes by their number of init fields, so that
# we choose the first one with all the required fields.
derived_classes.sort(key=lambda dc: len(get_init_fields(dc)))
for child_class in derived_classes:
logger.debug(f"child class: {child_class.__name__}, mro: {child_class.mro()}")
child_init_fields: Dict[str, Field] = get_init_fields(child_class)
child_init_field_names = set(child_init_fields.keys())
# Superset check: the child must accept every required init argument.
if child_init_field_names >= req_init_field_names:
# `child_class` is the first class with all required fields.
logger.debug(f"Using class {child_class} instead of {cls}")
return from_dict(child_class, d, drop_extra_fields=False)
init_args.update(extra_args)
try:
instance = cls(**init_args) # type: ignore
except TypeError as e:
# raise RuntimeError(f"Couldn't instantiate class {cls} using init args {init_args}.")
raise RuntimeError(f"Couldn't instantiate class {cls} using init args {init_args.keys()}: {e}")
# Fields excluded from __init__ are assigned on the instance afterwards.
for name, value in non_init_args.items():
logger.debug(f"Setting non-init field '{name}' on the instance.")
def _get_fields(cls) -> List[Tuple[Field, str]]: # type: ignore
    """Return the parent's fields, substituting a fresh ``init=True`` field
    for any entry whose metadata carries the ``'init_required'`` flag.

    The replacement copies the original field's name, type and internal
    ``_field_type`` so hologram treats it as a real init parameter; all
    other fields pass through untouched.
    """
    wrapped: List[Tuple[Field, str]] = []
    for original, arg_name in super()._get_fields():
        if original.metadata and original.metadata.get('init_required'):
            # tell hologram we're really an initvar
            replacement = field(init=True, metadata=original.metadata)
            replacement.name = original.name
            replacement.type = original.type
            replacement._field_type = original._field_type # type: ignore
            wrapped.append((replacement, arg_name))
        else:
            wrapped.append((original, arg_name))
    return wrapped