Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
def build_typed_field(self, type_hint, extra_kwargs=None):
    """Build and return the serializer field generated for ``type_hint``.

    A throwaway dataclass with a single field annotated as ``type_hint`` is
    wrapped in a DataclassSerializer, and the serializer's own
    ``build_typed_field`` is invoked for that field.
    """
    throwaway = dataclasses.make_dataclass('TestDataclass', [('test_field', type_hint)])
    wrapper = DataclassSerializer(dataclass=throwaway)
    resolved_type = wrapper.dataclass_definition.field_types['test_field']
    info = field_utils.get_type_info(resolved_type)
    kwargs = extra_kwargs or {}
    return wrapper.build_typed_field('test_field', info, kwargs)
def test_nested_list(self):
    """A list of nested dataclasses round-trips through a declared nested serializer."""
    class ParentSerializer(DataclassSerializer):
        nested = SimpleSerializer(many=True)

    parent_type = dataclasses.make_dataclass('Parent', [('nested', typing.List[Simple])])
    payload = {'nested': [{'value': 'A'}]}
    self.check_deserialize(ParentSerializer, dataclass=parent_type, data=payload)
# NOTE(review): the statements below are the tail of a test function whose
# `def` line is outside this chunk; `check` is presumably a helper asserting
# that deserializing the given data yields the given dataclass instance, and
# `simple` a one-field dataclass — confirm against the full file.
# an optional list of nested dataclasses
optionallist = dataclasses.make_dataclass('optionallist',
[('field', typing.Optional[typing.List[simple]])])
check(optionallist, {'field': None}, optionallist(None))
check(optionallist, {'field': []}, optionallist([]))
check(optionallist, {'field': [{'value': 'A'}]}, optionallist([simple('A')]))
# a list of optional nested dataclasses
listoptional = dataclasses.make_dataclass('listoptional',
[('field', typing.List[typing.Optional[simple]])])
check(listoptional, {'field': []}, listoptional([]))
check(listoptional, {'field': [None]}, listoptional([None]))
check(listoptional, {'field': [{'value': 'A'}]}, listoptional([simple('A')]))
# a dictionary of nested dataclasses
dictvalue = dataclasses.make_dataclass('dictvalue',
[('field', typing.Mapping[str, simple])])
check(dictvalue, {'field': {}}, dictvalue({}))
check(dictvalue, {'field': {'K': {'value': 'A'}}}, dictvalue({'K': simple('A')}))
# an optional dictionary of nested dataclasses
optionaldict = dataclasses.make_dataclass('optionaldict',
[('field', typing.Optional[typing.Dict[str, simple]])])
check(optionaldict, {'field': None}, optionaldict(None))
check(optionaldict, {'field': {}}, optionaldict({}))
check(optionaldict, {'field': {'K': {'value': 'A'}}}, optionaldict({'K': simple('A')}))
# a dictionary of optional nested dataclasses
dictoptional = dataclasses.make_dataclass('dictoptional',
[('field', typing.Dict[str, typing.Optional[simple]])])
check(dictoptional, {'field': {}}, dictoptional({}))
check(dictoptional, {'field': {'K': None}}, dictoptional({'K': None}))
# NOTE(review): everything below duplicates the dictvalue/optionaldict/
# dictoptional cases above almost verbatim; the only new assertion is the
# final non-None dictoptional check — consider deduplicating.
# a dictionary of nested dataclasses
dictvalue = dataclasses.make_dataclass('dictvalue',
[('field', typing.Mapping[str, simple])])
check(dictvalue, {'field': {}}, dictvalue({}))
check(dictvalue, {'field': {'K': {'value': 'A'}}}, dictvalue({'K': simple('A')}))
# an optional dictionary of nested dataclasses
optionaldict = dataclasses.make_dataclass('optionaldict',
[('field', typing.Optional[typing.Dict[str, simple]])])
check(optionaldict, {'field': None}, optionaldict(None))
check(optionaldict, {'field': {}}, optionaldict({}))
check(optionaldict, {'field': {'K': {'value': 'A'}}}, optionaldict({'K': simple('A')}))
# a dictionary of optional nested dataclasses
dictoptional = dataclasses.make_dataclass('dictoptional',
[('field', typing.Dict[str, typing.Optional[simple]])])
check(dictoptional, {'field': {}}, dictoptional({}))
check(dictoptional, {'field': {'K': None}}, dictoptional({'K': None}))
check(dictoptional, {'field': {'K': {'value': 'A'}}}, dictoptional({'K': simple('A')}))
def _make_serializer(name: str, fields: Iterable[Tuple[str, type, bool, Any, type]]):
    """Build a dataclass named ``name`` plus a matching serializer.

    Each spec in ``fields`` is a 5-tuple of
    (field name, annotation, has_default, default, serializer class).
    Specs with ``has_default`` False get ``dataclasses.MISSING`` (i.e. no
    default); specs with a non-None serializer class get an explicitly
    declared serializer field, instantiated with ``many=True`` when the
    annotation is an iterable type.
    """
    field_specs = []
    declared_serializers = {}
    for field_name, annotation, has_default, default, serializer in fields:
        effective_default = default if has_default else dataclasses.MISSING
        field_specs.append((field_name, annotation, dataclasses.field(default=effective_default)))
        if serializer is not None:
            many = typing_utils.is_iterable_type(annotation)
            declared_serializers[field_name] = serializer(many=many)
    built = dataclasses.make_dataclass(name, field_specs)
    return built, _make_dataclass_serializer(built, declared_serializers)
# NOTE(review): this span begins mid-function — the lone `)` below closes a
# call opened before this chunk, and `pn`, `pytype`, `unique`, `default`,
# `fields`, `base_class`, `objtype`, `schema`, and `subclasses` are all bound
# earlier, outside this view. Comments annotate only what is visible here.
)
# Field participates in __eq__ and __hash__ only when `unique` is truthy;
# it is always shown in repr and carries `default` as its default value.
field = dataclasses.field(
compare=unique,
hash=unique,
repr=True,
default=default,
)
fields.append((pn, pytype, field))
bases: Tuple[type, ...]
if base_class is not None:
bases = (base_class,)
else:
bases = ()
# Generate an immutable (frozen=True) dataclass named after the schema
# object; `_subclasses` is injected into the class namespace for later use.
ptype = dataclasses.make_dataclass(
objtype.get_name(schema).name,
fields=fields,
bases=bases,
frozen=True,
namespace={'_subclasses': subclasses},
)
assert isinstance(ptype, type)
return ptype
def jsonschema_asdataclass(
id_: str, schema: Dict[str, Any], bases: Tuple[type, ...] = ()
) -> Type[Any]:
"""Recursively generate a dataclass type named ``id_`` from a JSON schema.

Every property in ``schema['properties']`` becomes an Optional field:
'object' properties recurse into jsonschema_asdataclass (the nested type
is named ``{id_}_{prop_name}``), 'array' properties go through
jsonschema_array, and anything else is looked up in the SCALARS table.
Each field defaults to the property's 'default' key (None when absent).

NOTE(review): this chunk ends mid-expression — the list comprehension and
the make_dataclass(...) call are never closed before the next ``def``; the
closing brackets (and presumably a ``bases=bases`` argument) are outside
this view.
"""
return dataclasses.make_dataclass(
id_,
[
(
prop_name,
# Nested objects get their own generated dataclass type.
Optional[
jsonschema_asdataclass(f'{id_}_{prop_name}', prop)  # noqa
]
if prop['type'] == 'object'
else (
Optional[jsonschema_array(id_, prop_name, prop)]
if prop['type'] == 'array'
else Optional[SCALARS[prop['type']]]
),
dataclasses.field(default=prop.get('default')),
)
for prop_name, prop in schema['properties'].items()
def _get_cls_by_field(cls, fields):
    """Resolve each field to the Python class used for its values.

    For map-typed fields (proto_type == TYPE_MAP) a synthetic ``Entry``
    dataclass (subclassing Message) is generated with ``key``/``value``
    members built from the declared map types; the value class is also
    registered under ``"<name>.value"``. All other fields resolve via
    ``cls._cls_for``.
    """
    resolved = {}
    for member in fields:
        meta = FieldMetadata.get(member)
        if meta.proto_type != TYPE_MAP:
            resolved[member.name] = cls._cls_for(member)
            continue
        assert meta.map_types
        key_cls = cls._cls_for(member, index=0)
        value_cls = cls._cls_for(member, index=1)
        entry_fields = [
            ("key", key_cls, dataclass_field(1, meta.map_types[0])),
            ("value", value_cls, dataclass_field(2, meta.map_types[1])),
        ]
        resolved[member.name] = dataclasses.make_dataclass(
            "Entry", entry_fields, bases=(Message,)
        )
        resolved[member.name + ".value"] = value_cls
    return resolved