Module fl_server_api.serializers.model¶
View Source
# SPDX-FileCopyrightText: 2024 Benedikt Franke <benedikt.franke@dlr.de>
# SPDX-FileCopyrightText: 2024 Florian Heinrich <florian.heinrich@dlr.de>
#
# SPDX-License-Identifier: Apache-2.0
from django.http import HttpRequest
from logging import getLogger
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from torchinfo import summary
from torchinfo.model_statistics import ModelStatistics
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type
from fl_server_core.models import (
GlobalModel,
LocalModel,
MeanModel,
Model,
SWAGModel,
)
from fl_server_core.models.model import TModel
from fl_server_core.utils.torch_serialization import to_torch_module
from ..utils import get_file
def _model_stats_to_dict(stats: ModelStatistics) -> Dict[str, Any]:
"""
Convert model statistics to a dictionary.
Args:
stats (ModelStatistics): The model statistics.
Returns:
Dict[str, Any]: The dictionary representation of the model statistics.
"""
return dict(
input_size=stats.input_size,
total_input=stats.total_input,
total_mult_adds=stats.total_mult_adds,
total_output_bytes=stats.total_output_bytes,
total_param_bytes=stats.total_param_bytes,
total_params=stats.total_params,
trainable_params=stats.trainable_params,
summary_list=[
dict(
# Metadata
layer_id=layer.layer_id,
class_name=layer.class_name,
depth=layer.depth,
depth_index=layer.depth_index,
executed=layer.executed,
var_name=layer.var_name,
is_leaf_layer=layer.is_leaf_layer,
contains_lazy_param=layer.contains_lazy_param,
# Statistics
is_recursive=layer.is_recursive,
input_size=layer.input_size,
output_size=layer.output_size,
kernel_size=layer.kernel_size,
trainable_params=layer.trainable_params,
num_params=layer.num_params,
param_bytes=layer.param_bytes,
output_bytes=layer.output_bytes,
macs=layer.macs,
)
for layer in stats.summary_list
],
)
def _create_model_serializer(cls: Type[TModel], *, name: Optional[str] = None) -> Type[serializers.ModelSerializer]:
"""
Create a generic Model serializer.
Args:
cls (Type[TModel]): Database Model model.
name (Optional[str], optional): Serializer name. Defaults to `None`.
Returns:
Type[serializers.ModelSerializer]: Model model serializer.
"""
class ModelSerializer(serializers.ModelSerializer):
class Meta:
model = cls
exclude = ["polymorphic_ctype"]
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
return type(name or f"{cls.__name__}Serializer", (ModelSerializer, ), {})
# Create generic serializers for database model models.
GlobalModelSerializer = _create_model_serializer(GlobalModel)
LocalModelSerializer = _create_model_serializer(LocalModel)
ModelFallbackSerializer = _create_model_serializer(Model, name="ModelFallbackSerializer")
MeanModelSerializer = _create_model_serializer(MeanModel)
SWAGModelSerializer = _create_model_serializer(SWAGModel)
class ModelSerializer(serializers.ModelSerializer):
"""
A common serializer for the polymorphic Model class.
The model classes `GlobalModel`, `LocalModel`, `MeanModel`, and `SWAGModel` are supported.
"""
class Meta:
model = Model
exclude = ["polymorphic_ctype"]
@classmethod
def get_serializer(cls, model: Type[Model]):
"""
Get the appropriate serializer based on the model type.
Args:
model (Type[Model]): The model class.
Returns:
Type[serializers.ModelSerializer]: The serializer class for the model.
"""
if model == GlobalModel:
return GlobalModelSerializer
if model == LocalModel:
return LocalModelSerializer
if model == MeanModel:
return MeanModelSerializer
if model == SWAGModel:
return SWAGModelSerializer
getLogger("fl.server").warning(f"Using fallback model serializer for {model.__name__}")
return ModelFallbackSerializer
def to_representation(self, instance: Model):
"""
Generate a dictionary representation of the corresponding model instance.
Args:
instance (Model): The Model instance.
Returns:
dict: The dictionary representation of the Model instance.
"""
serializer = self.get_serializer(instance.__class__)
return serializer(instance, context=self.context).data
class ModelSerializerNoWeights(ModelSerializer):
"""
A model serializer that excludes the model weights.
"""
class Meta:
model = Model
exclude = ["polymorphic_ctype", "weights"]
include = ["has_preprocessing"]
class ModelSerializerNoWeightsWithStats(ModelSerializerNoWeights):
"""
A model serializer that excludes the model weights but includes the model statistics.
"""
class Meta:
model = Model
exclude = ["polymorphic_ctype", "weights"]
include = ["stats"]
include = ["has_preprocessing", "stats"]
#######################################################################################################################
# POST BODY SERIALIZERS #
def load_and_create_model_request(request: HttpRequest) -> Model:
"""
Load and create a model from a Django request.
Args:
request (HttpRequest): The request object.
Returns:
Model: The created model.
"""
model_cls, data = load_model_request(request)
if model_cls is MeanModel:
sub_models = data.pop("models")
model = model_cls.objects.create(**data)
model.models.set(sub_models)
return model
return model_cls.objects.create(**data)
def load_model_request(request: HttpRequest) -> Tuple[Type[Model], Dict[str, Any]]:
"""
Load model data from a Django request.
Args:
request (HttpRequest): The request object.
Returns:
Tuple[Type[Model], Dict[str, Any]]: The model class and parsed request data.
"""
if request.content_type == "application/json":
data = request.data
else: # multipart/form-data
data = request.POST.dict()
data["owner"] = request.user
model_type = data.pop("type", "GLOBAL").upper()
# name and description are required
if "name" not in data:
raise ValidationError("Missing required field: name")
if "description" not in data:
raise ValidationError("Missing required field: description")
# round is an optional field (it may not even make sense to be able to set it)
if "round" not in data:
data["round"] = 0
# QueryDict removes all "None" entities.
# Therefore, we need to manually parse the data to dict (the "normal" python way) and get the correct data
if request.content_type == "application/json":
data["input_shape"] = dict(request.data).get("input_shape", None)
else: # multipart/form-data
data["input_shape"] = dict(request.POST).get("input_shape", None)
# input_shape contains None as String but this is not parseable to a nullable int later -> parsing
if "input_shape" in data:
if data["input_shape"] is None:
# nothing to do
pass
elif isinstance(data["input_shape"], str) and data["input_shape"].upper() in ["NONE", "NULL"]:
data["input_shape"] = None
elif not isinstance(data["input_shape"], Sequence):
data["input_shape"] = None
elif len(data["input_shape"]) < 1:
data["input_shape"] = None
else:
data["input_shape"] = [None if str(x).upper() in ["NONE", "NULL"] else x for x in data["input_shape"]]
# model_file is required except for MEAN models
if model_type != "MEAN":
data["weights"] = get_file(request, "model_file")
verify_model_object(data["weights"])
# model_preprocessing_file is optional
data["preprocessing"] = None
if request.FILES.__contains__("model_preprocessing_file"):
data["preprocessing"] = get_file(request, "model_preprocessing_file")
verify_model_object(data["preprocessing"], "preprocessing")
# MEAN models require a list of model UUIDs
if model_type == "MEAN":
data = _parse_mean_model_models(data)
# return class type and parsed data
match model_type:
case "SWAG": return SWAGModel, data
case "MEAN": return MeanModel, data
case _: return GlobalModel, data
def verify_model_object(model: bytes, file_type_name: str = "model") -> None:
"""
Verify the model object.
Args:
model (bytes): The model object.
file_type_name (str, optional): The file type name. Defaults to "model".
Raises:
ValidationError: If the model object is invalid.
"""
try:
to_torch_module(model)
except Exception as e:
raise ValidationError(f"Invalid {file_type_name} file: {e}") from e
def _parse_mean_model_models(data: Dict[str, Any]) -> Dict[str, Any]:
"""
Parse the models field of a mean model request.
Args:
data (Dict[str, Any]): The request data.
Returns:
Dict[str, Any]: The parsed request data.
Raises:
ValidationError: If the models field is missing or invalid.
"""
if "models" not in data:
raise ValidationError("Missing required field: models")
models = data["models"]
if not isinstance(models, list) or not all([isinstance(m, str) for m in models]):
raise ValidationError("Invalid type for field: models")
data["models"] = Model.objects.filter(id__in=models)
if data["models"].count() != len(models):
raise ValidationError("Invalid model UUIDs found.")
if not all([m.owner.id == data["owner"].id for m in data["models"]]):
raise ValidationError("Invalid model selection. Insufficient model permissions.")
return data
Functions¶
load_and_create_model_request¶
def load_and_create_model_request(
request: django.http.request.HttpRequest
) -> fl_server_core.models.model.Model
Load and create a model from a Django request.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
request | HttpRequest | The request object. | None |
Returns:
Type | Description |
---|---|
Model | The created model. |
View Source
def load_and_create_model_request(request: HttpRequest) -> Model:
"""
Load and create a model from a Django request.
Args:
request (HttpRequest): The request object.
Returns:
Model: The created model.
"""
model_cls, data = load_model_request(request)
if model_cls is MeanModel:
sub_models = data.pop("models")
model = model_cls.objects.create(**data)
model.models.set(sub_models)
return model
return model_cls.objects.create(**data)
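This helper is meant to be invoked from a request handler. A minimal sketch of such a call, assuming a hypothetical DRF view (the view class, endpoint wiring, and response shape are illustrative and not part of this module):
# Hypothetical view; only load_and_create_model_request comes from this module.
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from fl_server_api.serializers.model import load_and_create_model_request
class ModelUploadView(APIView):
    def post(self, request):
        # Parse and validate the request body, then persist the matching Model subclass.
        model = load_and_create_model_request(request)
        return Response({"model_id": str(model.id)}, status=status.HTTP_201_CREATED)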
load_model_request¶
def load_model_request(
request: django.http.request.HttpRequest
) -> Tuple[Type[fl_server_core.models.model.Model], Dict[str, Any]]
Load model data from a Django request.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
request | HttpRequest | The request object. | None |
Returns:
Type | Description |
---|---|
Tuple[Type[Model], Dict[str, Any]] | The model class and parsed request data. |
View Source
def load_model_request(request: HttpRequest) -> Tuple[Type[Model], Dict[str, Any]]:
"""
Load model data from a Django request.
Args:
request (HttpRequest): The request object.
Returns:
Tuple[Type[Model], Dict[str, Any]]: The model class and parsed request data.
"""
if request.content_type == "application/json":
data = request.data
else: # multipart/form-data
data = request.POST.dict()
data["owner"] = request.user
model_type = data.pop("type", "GLOBAL").upper()
# name and description are required
if "name" not in data:
raise ValidationError("Missing required field: name")
if "description" not in data:
raise ValidationError("Missing required field: description")
# round is an optional field (it may not even make sense to be able to set it)
if "round" not in data:
data["round"] = 0
# QueryDict removes all "None" entities.
# Therefore, we need to manually parse the data to dict (the "normal" python way) and get the correct data
if request.content_type == "application/json":
data["input_shape"] = dict(request.data).get("input_shape", None)
else: # multipart/form-data
data["input_shape"] = dict(request.POST).get("input_shape", None)
# input_shape contains None as String but this is not parseable to a nullable int later -> parsing
if "input_shape" in data:
if data["input_shape"] is None:
# nothing to do
pass
elif isinstance(data["input_shape"], str) and data["input_shape"].upper() in ["NONE", "NULL"]:
data["input_shape"] = None
elif not isinstance(data["input_shape"], Sequence):
data["input_shape"] = None
elif len(data["input_shape"]) < 1:
data["input_shape"] = None
else:
data["input_shape"] = [None if str(x).upper() in ["NONE", "NULL"] else x for x in data["input_shape"]]
# model_file is required except for MEAN models
if model_type != "MEAN":
data["weights"] = get_file(request, "model_file")
verify_model_object(data["weights"])
# model_preprocessing_file is optional
data["preprocessing"] = None
if request.FILES.__contains__("model_preprocessing_file"):
data["preprocessing"] = get_file(request, "model_preprocessing_file")
verify_model_object(data["preprocessing"], "preprocessing")
# MEAN models require a list of model UUIDs
if model_type == "MEAN":
data = _parse_mean_model_models(data)
# return class type and parsed data
match model_type:
case "SWAG": return SWAGModel, data
case "MEAN": return MeanModel, data
case _: return GlobalModel, data
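For reference, a hedged example of a multipart request that satisfies this parser, built with Django's test client; the endpoint path, file name, and any authentication are assumptions from the surrounding project, not from this function:
# Hypothetical upload; the URL and authentication are assumptions,
# the field names match the parser above.
from django.test import Client
client = Client()
with open("model.pt", "rb") as model_file:
    response = client.post(
        "/api/models/",  # assumed endpoint
        {
            "name": "mnist-cnn",                       # required
            "description": "Initial global model",     # required
            "type": "GLOBAL",                          # GLOBAL (default), SWAG, or MEAN
            "input_shape": ["None", "1", "28", "28"],  # "None"/"NULL" entries become null dimensions
            "model_file": model_file,                  # required unless type == "MEAN"
        },
    )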
verify_model_object¶
def verify_model_object(
    model: bytes,
    file_type_name: str = 'model'
) -> None
Verify the model object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
model | bytes | The model object. | None |
file_type_name | str | The file type name. Defaults to "model". | "model" |
Raises:
Type | Description |
---|---|
ValidationError | If the model object is invalid. |
View Source
def verify_model_object(model: bytes, file_type_name: str = "model") -> None:
"""
Verify the model object.
Args:
model (bytes): The model object.
file_type_name (str, optional): The file type name. Defaults to "model".
Raises:
ValidationError: If the model object is invalid.
"""
try:
to_torch_module(model)
except Exception as e:
raise ValidationError(f"Invalid {file_type_name} file: {e}") from e
Classes¶
GlobalModelSerializer¶
A `ModelSerializer` is just a regular `Serializer`, except that:
- A set of default fields are automatically populated.
- A set of default validators are automatically populated.
- Default `.create()` and `.update()` implementations are provided.
The process of automatically determining a set of serializer fields based on the model fields is reasonably complex, but you almost certainly don't need to dig into the implementation.
If the `ModelSerializer` class doesn't generate the set of fields that you need, you should either declare the extra/differing fields explicitly on the serializer class, or simply use a `Serializer` class.
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, e.g.:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
Instance variables¶
context¶
Returns the context as passed to the root serializer on initialization.
root¶
Returns the top-level serializer for this field.
Methods¶
analyze_torch_model¶
View Source
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
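`analyze_torch_model` replaces unknown (`None`) dimensions of `input_shape` with 1 before handing the shape to `torchinfo.summary`. A standalone sketch of that call with a hypothetical model and shape:
import torch
from torchinfo import summary
stored_input_shape = [None, 1, 28, 28]                             # unknown batch size
input_shape = [1 if x is None else x for x in stored_input_shape]  # -> [1, 1, 28, 28]
model = torch.nn.Sequential(torch.nn.Flatten(), torch.nn.Linear(28 * 28, 10))
stats = summary(model, input_size=input_shape, verbose=0)
print(stats.total_params)  # 7850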
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply raise `SkipField`, indicating that no value should be set in the validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
get_default_field_names¶
Return the default list of field names that will be used if the `Meta.fields` option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
get_field_names¶
Returns the list of all field names that should be created when instantiating this serializer class. This is based on the default set of fields, but also takes into account the `Meta.fields` or `Meta.exclude` options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
get_fields¶
Return the dict of field names -> field instances that should be used for `self.fields` when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred too.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
to_representation¶
View Source
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
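A hedged sketch of how the context flags read above shape the output; it assumes at least one `GlobalModel` record already exists:
from fl_server_core.models import GlobalModel
from fl_server_api.serializers.model import GlobalModelSerializer
instance = GlobalModel.objects.first()  # assumes an existing record
data = GlobalModelSerializer(instance, context={"with-stats": True}).data
assert "weights" not in data            # omitted unless context["with-weights"] is truthy
print(data["has_preprocessing"], data.get("stats"))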
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value, that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
LocalModelSerializer¶
A `ModelSerializer` is just a regular `Serializer`, except that:
- A set of default fields are automatically populated.
- A set of default validators are automatically populated.
- Default `.create()` and `.update()` implementations are provided.
The process of automatically determining a set of serializer fields based on the model fields is reasonably complex, but you almost certainly don't need to dig into the implementation.
If the `ModelSerializer` class doesn't generate the set of fields that you need, you should either declare the extra/differing fields explicitly on the serializer class, or simply use a `Serializer` class.
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, e.g.:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
Instance variables¶
context¶
Returns the context as passed to the root serializer on initialization.
root¶
Returns the top-level serializer for this field.
Methods¶
analyze_torch_model¶
View Source
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit .create() method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
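As the docstring notes, writable nested relationships require an explicit .create(). A hedged sketch with hypothetical Album/Track models (not part of this project), following the pattern described above:
class AlbumSerializer(serializers.ModelSerializer):
    tracks = TrackSerializer(many=True)        # hypothetical nested serializer

    class Meta:
        model = Album                          # hypothetical model
        fields = ['title', 'tracks']

    def create(self, validated_data):
        # Pop the nested data, create the parent first, then its children.
        tracks_data = validated_data.pop('tracks')
        album = Album.objects.create(**validated_data)
        for track_data in tracks_data:
            Track.objects.create(album=album, **track_data)
        return album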
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise SkipField, indicating that no value should be set in the
validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
get_default_field_names¶
Return the default list of field names that will be used if the
Meta.fields option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
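A hedged sketch of the two spellings this method accepts; the serializer below is illustrative and not defined by this module:
class ExampleGlobalModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = GlobalModel
        fields = '__all__'
        # per-field keyword arguments ...
        extra_kwargs = {'weights': {'write_only': True}}
        # ... or the shortcut that get_extra_kwargs() expands into read_only=True
        read_only_fields = ['input_shape']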
get_field_names¶
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the Meta.fields or
Meta.exclude options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
get_fields¶
Return the dict of field names -> field instances that should be
used for self.fields when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
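To illustrate what this method reacts to, a hedged sketch with a hypothetical Membership model; a ModelSerializer over it would automatically receive a UniqueTogetherValidator for ('user', 'group'):
from django.db import models

class Membership(models.Model):                # hypothetical model
    user = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    group = models.ForeignKey('auth.Group', on_delete=models.CASCADE)

    class Meta:
        unique_together = [('user', 'group')]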
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred too.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
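A short usage sketch of the validation flow above; the payload field names are placeholders, not the actual GlobalModel schema:
serializer = GlobalModelSerializer(data={'name': 'demo'})
if serializer.is_valid():
    print(serializer.validated_data)     # populated only after validation succeeds
else:
    print(serializer.errors)             # populated only after validation fails
# Alternatively, let DRF turn failures into a 400 response:
serializer.is_valid(raise_exception=True)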
run_validation¶
We override the default run_validation, because the validation
performed by validators and the .validate() method should
be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
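A hedged sketch of how save() dispatches; payload, existing_model and request are placeholders:
# No instance bound: save() delegates to create().
serializer = GlobalModelSerializer(data=payload)
serializer.is_valid(raise_exception=True)
created = serializer.save()

# Instance bound: save() delegates to update(); extra keyword arguments
# are merged into validated_data before the call.
serializer = GlobalModelSerializer(existing_model, data=payload, partial=True)
serializer.is_valid(raise_exception=True)
updated = serializer.save(owner=request.user)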
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
to_representation¶
View Source
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
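The context flags checked above are supplied by the caller. A minimal sketch requesting the torchinfo statistics while omitting the raw weights; model_instance stands in for a stored GlobalModel:
serializer = GlobalModelSerializer(
    model_instance,
    context={'with-weights': False, 'with-stats': True},
)
data = serializer.data
# 'weights' is stripped from the output, 'stats' holds the torchinfo summary
# (or None if analysis failed), and 'has_preprocessing' reports whether a
# preprocessing model is attached.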
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise ValidationError, indicating invalid data.
- Raise SkipField, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value, that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
MeanModelSerializer¶
A ModelSerializer is just a regular Serializer, except that:
- A set of default fields are automatically populated.
- A set of default validators are automatically populated.
- Default .create() and .update() implementations are provided.
The process of automatically determining a set of serializer fields based on the model fields is reasonably complex, but you almost certainly don't need to dig into the implementation.
If the ModelSerializer class doesn't generate the set of fields that
you need you should either declare the extra/differing fields explicitly on
the serializer class, or simply use a Serializer class.
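If the generated fields ever need adjusting, a serializer can be declared explicitly instead of relying on _create_model_serializer. A hedged sketch for MeanModel, mirroring the exclusion used by the generated serializers:
class CustomMeanModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = MeanModel
        exclude = ['polymorphic_ctype']

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data.pop('weights', None)   # e.g. never expose raw weights in this variant
        return data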
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
many_init¶
This method implements the creation of a ListSerializer parent
class when many=True is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
Instance variables¶
context: Returns the context as passed to the root serializer on initialization.
root: Returns the top-level serializer for this field.
Methods¶
analyze_torch_model¶
View Source
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit .create() method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise SkipField, indicating that no value should be set in the
validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
get_default_field_names¶
Return the default list of field names that will be used if the
Meta.fields option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
get_field_names¶
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the Meta.fields or
Meta.exclude options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
get_fields¶
Return the dict of field names -> field instances that should be
used for self.fields when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred to.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
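Usage sketch of the expected call order with one of the serializers from this module; `payload` is a hypothetical request dictionary whose keys depend on the actual model schema.
serializer = GlobalModelSerializer(data=payload)
if serializer.is_valid():
    instance = serializer.save()
else:
    errors = serializer.errors  # dict mapping field names to error details
# In a DRF view, raising is usually preferred and results in an HTTP 400 response:
# serializer.is_valid(raise_exception=True)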
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
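A sketch of passing extra attributes through `save()`, as the assertion message above suggests; `payload`, `request`, and the `owner` field are hypothetical and assume the model actually has such a field.
serializer = LocalModelSerializer(data=payload)
serializer.is_valid(raise_exception=True)
instance = serializer.save(owner=request.user)  # merged into validated_data before create()/update()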
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
to_representation¶
View Source
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
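A sketch of how the `with-weights` and `with-stats` context flags checked above shape the output; `model_instance` stands in for a hypothetical `GlobalModel` object.
data = GlobalModelSerializer(
    model_instance,
    context={"with-weights": False, "with-stats": True},
).data
# "weights" is omitted from the payload; "stats" holds the torchinfo summary dict,
# or None if the analysis failed or the model has no input_shape.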
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
ModelFallbackSerializer¶
class ModelFallbackSerializer(
instance=None,
data=<class 'rest_framework.fields.empty'>,
**kwargs
)
A `ModelSerializer` is just a regular `Serializer`, except that:
- A set of default fields are automatically populated.
- A set of default validators are automatically populated.
- Default `.create()` and `.update()` implementations are provided.
The process of automatically determining a set of serializer fields based on the model fields is reasonably complex, but you almost certainly don't need to dig into the implementation.
If the `ModelSerializer` class doesn't generate the set of fields that you need, you should either declare the extra/differing fields explicitly on the serializer class (see the sketch below), or simply use a `Serializer` class.
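As a sketch of the "declare the extra/differing fields explicitly" option, assuming a hypothetical computed field on top of the generated ones:
from rest_framework import serializers
from fl_server_core.models import Model

class ModelWithFlagSerializer(serializers.ModelSerializer):
    # Hypothetical extra read-only field derived from the instance type.
    is_swag = serializers.SerializerMethodField()

    class Meta:
        model = Model
        exclude = ["polymorphic_ctype"]

    def get_is_swag(self, obj):
        return obj.__class__.__name__ == "SWAGModel"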
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
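Usage sketch: `many=True` routes through `many_init` and wraps a single child serializer in a `ListSerializer`; the queryset variable is hypothetical.
queryset = GlobalModel.objects.all()
serializer = GlobalModelSerializer(queryset, many=True, context={"with-weights": False})
payload = serializer.data  # list of dicts, one per instance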
Instance variables¶
Returns the context as passed to the root serializer on initialization.
Returns the top-level serializer for this field.
Methods¶
analyze_torch_model¶
View Source
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
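A sketch of the shape handling above: a `None` placeholder (typically the batch dimension) is replaced by 1 before calling `torchinfo.summary`; the example values are hypothetical.
input_shape = [None, 3, 32, 32]  # as stored on a hypothetical GlobalModel
effective = [1 if x is None else x for x in input_shape]
# effective == [1, 3, 32, 32], passed as summary(torch_model, input_size=effective, verbose=0)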
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
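A sketch of the option that triggers this code path: setting `Meta.depth` renders related objects as nested, read-only serializers instead of primary keys; the serializer name is hypothetical.
from rest_framework import serializers
from fl_server_core.models import LocalModel

class NestedLocalModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = LocalModel
        fields = '__all__'
        depth = 1  # relations one level deep become nested objects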
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships. If you want to support writable nested relationships you'll need to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
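A sketch of the explicit `.create()` override recommended in the docstring for writable nested relationships; `User`, `Profile`, and `ProfileSerializer` are hypothetical.
from rest_framework import serializers

class UserSerializer(serializers.ModelSerializer):
    profile = ProfileSerializer()  # hypothetical nested serializer

    class Meta:
        model = User  # hypothetical model with a related Profile
        fields = ['username', 'profile']

    def create(self, validated_data):
        profile_data = validated_data.pop('profile')
        user = User.objects.create(**validated_data)
        Profile.objects.create(user=user, **profile_data)
        return user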
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply raise `SkipField`, indicating that no value should be set in the validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
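A sketch of the kinds of defaults this method evaluates, one plain callable and one context-aware callable (`requires_context = True`); the serializer, model, and field names are hypothetical.
from django.utils import timezone
from rest_framework import serializers

class StampedSerializer(serializers.ModelSerializer):
    owner = serializers.HiddenField(default=serializers.CurrentUserDefault())  # requires_context callable
    created = serializers.DateTimeField(default=timezone.now)  # plain callable

    class Meta:
        model = ExampleModel  # hypothetical model with owner/created/name fields
        fields = ['owner', 'created', 'name']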
get_default_field_names¶
Return the default list of field names that will be used if the `Meta.fields` option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
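A sketch of the two `Meta` options merged by this method; the `owner` and `name` fields are hypothetical.
from rest_framework import serializers
from fl_server_core.models import GlobalModel

class TweakedGlobalModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = GlobalModel
        fields = '__all__'
        read_only_fields = ['owner']  # folded into extra_kwargs as {'owner': {'read_only': True}}
        extra_kwargs = {'name': {'help_text': 'Display name of the model'}}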
get_field_names¶
Returns the list of all field names that should be created when instantiating this serializer class. This is based on the default set of fields, but also takes into account the `Meta.fields` or `Meta.exclude` options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
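A sketch of the mutually exclusive `Meta.fields` / `Meta.exclude` options checked above; exactly one of them must be given.
from rest_framework import serializers
from fl_server_core.models import GlobalModel

class SlimGlobalModelSerializer(serializers.ModelSerializer):
    class Meta:
        model = GlobalModel
        exclude = ['weights']  # every model field except the listed ones
        # fields = '__all__'   # alternative: all fields (cannot be combined with exclude)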
get_fields¶
Return the dict of field names -> field instances that should be used for `self.fields` when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing through here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred to.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
to_representation¶
View Source
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
ModelSerializer¶
A common serializer for the polymorphic Model class.
The model classes `GlobalModel`, `LocalModel`, `MeanModel`, and `SWAGModel` are supported.
View Source
class ModelSerializer(serializers.ModelSerializer):
"""
A common serializer for the polymorphic Model class.
The model classes `GlobalModel`, `LocalModel`, `MeanModel`, and `SWAGModel` are supported.
"""
class Meta:
model = Model
exclude = ["polymorphic_ctype"]
@classmethod
def get_serializer(cls, model: Type[Model]):
"""
Get the appropriate serializer based on the model type.
Args:
model (Type[Model]): The model class.
Returns:
Type[serializers.ModelSerializer]: The serializer class for the model.
"""
if model == GlobalModel:
return GlobalModelSerializer
if model == LocalModel:
return LocalModelSerializer
if model == MeanModel:
return MeanModelSerializer
if model == SWAGModel:
return SWAGModelSerializer
getLogger("fl.server").warning(f"Using fallback model serializer for {model.__name__}")
return ModelFallbackSerializer
def to_representation(self, instance: Model):
"""
Generate a dictionary representation of the corresponding model instance.
Args:
instance (Model): The Model instance.
Returns:
dict: The dictionary representation of the Model instance.
"""
serializer = self.get_serializer(instance.__class__)
return serializer(instance, context=self.context).data
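Usage sketch of the polymorphic dispatch above: serializing a mixed queryset lets each instance be rendered by its concrete serializer; the queryset is hypothetical.
mixed = Model.objects.all()  # may contain GlobalModel, LocalModel, MeanModel, SWAGModel instances
payload = ModelSerializer(mixed, many=True, context={"with-stats": False}).data
# each element was produced by the serializer returned from get_serializer()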
Ancestors (in MRO)¶
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Descendants¶
- fl_server_api.serializers.model.ModelSerializerNoWeights
Class variables¶
Static methods¶
get_serializer¶
Get the appropriate serializer based on the model type.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
model | Type[Model] | The model class. | None |
Returns:
Type | Description |
---|---|
Type[serializers.ModelSerializer] | The serializer class for the model. |
View Source
@classmethod
def get_serializer(cls, model: Type[Model]):
"""
Get the appropriate serializer based on the model type.
Args:
model (Type[Model]): The model class.
Returns:
Type[serializers.ModelSerializer]: The serializer class for the model.
"""
if model == GlobalModel:
return GlobalModelSerializer
if model == LocalModel:
return LocalModelSerializer
if model == MeanModel:
return MeanModelSerializer
if model == SWAGModel:
return SWAGModelSerializer
getLogger("fl.server").warning(f"Using fallback model serializer for {model.__name__}")
return ModelFallbackSerializer
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
Instance variables¶
Returns the context as passed to the root serializer on initialization.
Returns the top-level serializer for this field.
Methods¶
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
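For illustration only, the `choices` branch above means a model field declared with `choices` is generated as the serializer's `serializer_choice_field` (a `ChoiceField`) instead of its regular typed field; the model below is hypothetical.
from django.db import models
from rest_framework import serializers

class Ticket(models.Model):                      # hypothetical model
    STATUS_CHOICES = [('open', 'Open'), ('closed', 'Closed')]
    status = models.CharField(max_length=10, choices=STATUS_CHOICES)

class TicketSerializer(serializers.ModelSerializer):
    class Meta:
        model = Ticket
        fields = ['status']                      # generated as a ChoiceField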
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
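A minimal sketch of the explicit `.create()` override the docstring asks for when writable nested relationships are needed; `Album`, `Track`, `TrackSerializer` and the field names are assumptions for illustration.
from rest_framework import serializers

class AlbumSerializer(serializers.ModelSerializer):     # hypothetical serializer
    tracks = TrackSerializer(many=True)                  # assumed nested serializer

    class Meta:
        model = Album
        fields = ['title', 'tracks']

    def create(self, validated_data):
        # Pop the nested data first, create the parent, then the children.
        tracks_data = validated_data.pop('tracks')
        album = Album.objects.create(**validated_data)
        for track_data in tracks_data:
            Track.objects.create(album=album, **track_data)
        return album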
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
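A short sketch of how `fail` is typically used from a custom field: the key is looked up in `error_messages` (populated from `default_error_messages`) and formatted with the keyword arguments. The field and message below are illustrative.
from rest_framework import serializers

class EvenNumberField(serializers.IntegerField):        # hypothetical custom field
    default_error_messages = {'not_even': 'Expected an even number, got {value}.'}

    def to_internal_value(self, data):
        value = super().to_internal_value(data)
        if value % 2 != 0:
            self.fail('not_even', value=value)           # raises ValidationError with code 'not_even'
        return value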
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply raise `SkipField`, indicating that no value should be set in the validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
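For context, a callable default that sets `requires_context = True` receives the field itself when `get_default` invokes it, which is how DRF's built-in `CurrentUserDefault` works. The default class below is a hedged, hypothetical example.
from rest_framework import serializers

class RequestPathDefault:                                # hypothetical context-aware default
    requires_context = True

    def __call__(self, serializer_field):
        request = serializer_field.context.get('request')
        return request.path if request else None

class AuditEntrySerializer(serializers.Serializer):     # illustrative serializer
    path = serializers.CharField(default=RequestPathDefault())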
get_default_field_names¶
Return the default list of field names that will be used if the `Meta.fields` option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
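A brief sketch of the two `Meta` options this method merges, on a hypothetical serializer: `read_only_fields` is folded into `extra_kwargs` as `read_only=True` entries.
from rest_framework import serializers

class UserSerializer(serializers.ModelSerializer):      # hypothetical serializer
    class Meta:
        model = User                                     # assumed Django user model
        fields = ['id', 'username', 'password', 'date_joined']
        read_only_fields = ['date_joined']               # becomes extra_kwargs['date_joined'] = {'read_only': True}
        extra_kwargs = {'password': {'write_only': True}}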
get_field_names¶
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or `Meta.exclude` options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
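The two mutually exclusive declarations this method accepts, shown as a hedged sketch with a placeholder model.
from rest_framework import serializers

class FullSerializer(serializers.ModelSerializer):
    class Meta:
        model = SomeModel                # placeholder model
        fields = '__all__'               # explicit opt-in to every model field

class TrimmedSerializer(serializers.ModelSerializer):
    class Meta:
        model = SomeModel
        exclude = ['internal_notes']     # every field except the listed ones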
get_fields¶
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
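As an illustration, a model declaring `unique_together` causes this method to attach a `UniqueTogetherValidator` automatically; declaring `Meta.validators` explicitly, as sketched below with hypothetical names, takes precedence and is equivalent to the generated validator.
from django.db import models
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator

class Booking(models.Model):                             # hypothetical model
    room = models.CharField(max_length=32)
    date = models.DateField()

    class Meta:
        unique_together = [('room', 'date')]

class BookingSerializer(serializers.ModelSerializer):
    class Meta:
        model = Booking
        fields = ['room', 'date']
        # Equivalent to the validator that would be generated automatically:
        validators = [
            UniqueTogetherValidator(queryset=Booking.objects.all(), fields=('room', 'date')),
        ]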
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred too.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
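Typical call pattern, as a usage sketch; `BookingSerializer` here refers to the hypothetical serializer sketched earlier, and any serializer instantiated with `data=` behaves the same way.
serializer = BookingSerializer(data={'room': 'A1', 'date': '2024-01-01'})
if serializer.is_valid():
    booking = serializer.save()
else:
    print(serializer.errors)

# Or let DRF convert validation failures into an HTTP 400 response:
serializer.is_valid(raise_exception=True)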
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
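A usage sketch of the extra keyword arguments mentioned in the guard above; the `owner` field, `ArticleSerializer` and `request` object are assumptions for illustration.
serializer = ArticleSerializer(data=request.data)        # hypothetical serializer and request
serializer.is_valid(raise_exception=True)
article = serializer.save(owner=request.user)            # kwargs are merged into validated_data before create()/update()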
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
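The `validate_<field_name>` hook looked up above can be sketched like this; the serializer, field, and rule are illustrative.
from rest_framework import serializers

class EventSerializer(serializers.Serializer):           # hypothetical serializer
    title = serializers.CharField()

    def validate_title(self, value):
        # Called by to_internal_value() after the field's own run_validation().
        if 'forbidden' in value.lower():
            raise serializers.ValidationError('Title contains a forbidden word.')
        return value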
to_representation¶
Generate a dictionary representation of the corresponding model instance.
Parameters:
| Name | Type | Description | Default |
|---|---|---|---|
| instance | Model | The Model instance. | None |
Returns:
| Type | Description |
|---|---|
| dict | The dictionary representation of the Model instance. |
View Source
def to_representation(self, instance: Model):
"""
Generate a dictionary representation of the corresponding model instance.
Args:
instance (Model): The Model instance.
Returns:
dict: The dictionary representation of the Model instance.
"""
serializer = self.get_serializer(instance.__class__)
return serializer(instance, context=self.context).data
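A hedged usage sketch of this dispatch: serializing a mixed collection routes each instance through the serializer matching its concrete class, so global and local models render with their own fields. The queryset and context flag below are assumptions.
# Sketch only: Model.objects.all() may yield GlobalModel, LocalModel, etc.
instances = Model.objects.all()
data = ModelSerializer(instances, many=True, context={"with-stats": True}).data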
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
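Usage sketch for the update path: passing an existing instance (optionally with `partial=True`) makes `save()` dispatch to this method. The serializer and instance names are assumptions.
serializer = ArticleSerializer(instance=article, data={'title': 'New title'}, partial=True)
serializer.is_valid(raise_exception=True)
article = serializer.save()                    # calls update(instance, validated_data)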
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
ModelSerializerNoWeights¶
class ModelSerializerNoWeights(
instance=None,
data=<class 'rest_framework.fields.empty'>,
**kwargs
)
A model serializer that excludes the model weights.
View Source
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Descendants¶
- fl_server_api.serializers.model.ModelSerializerNoWeightsWithStats
Class variables¶
Static methods¶
get_serializer¶
Get the appropriate serializer based on the model type.
Parameters:
| Name | Type | Description | Default |
|---|---|---|---|
| model | Type[Model] | The model class. | None |
Returns:
| Type | Description |
|---|---|
| Type[serializers.ModelSerializer] | The serializer class for the model. |
View Source
@classmethod
def get_serializer(cls, model: Type[Model]):
"""
Get the appropriate serializer based on the model type.
Args:
model (Type[Model]): The model class.
Returns:
Type[serializers.ModelSerializer]: The serializer class for the model.
"""
if model == GlobalModel:
return GlobalModelSerializer
if model == LocalModel:
return LocalModelSerializer
if model == MeanModel:
return MeanModelSerializer
if model == SWAGModel:
return SWAGModelSerializer
getLogger("fl.server").warning(f"Using fallback model serializer for {model.__name__}")
return ModelFallbackSerializer
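A short, hedged sketch of how this lookup might be used to serialize an arbitrary model instance by its concrete class; `some_model` is assumed to be a GlobalModel, LocalModel, MeanModel or SWAGModel instance obtained elsewhere.
serializer_cls = ModelSerializerNoWeights.get_serializer(type(some_model))
payload = serializer_cls(some_model).data      # weights are omitted unless the 'with-weights' context flag is set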
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
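A sketch of the customization hook referenced in the docstring: pointing `Meta.list_serializer_class` at a custom `ListSerializer` makes this method build that class as the `many=True` parent. The names below are illustrative, and the sketch assumes a plain list of items.
from rest_framework import serializers

class FilteringListSerializer(serializers.ListSerializer):   # hypothetical list serializer
    def to_representation(self, data):
        visible = [item for item in data if getattr(item, 'visible', True)]
        return super().to_representation(visible)

class ItemSerializer(serializers.Serializer):                # hypothetical child serializer
    name = serializers.CharField()

    class Meta:
        list_serializer_class = FilteringListSerializer

# ItemSerializer(items, many=True) now wraps the child in FilteringListSerializer.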
Instance variables¶
context: Returns the context as passed to the root serializer on initialization.
root: Returns the top-level serializer for this field.
Methods¶
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
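As a hedged illustration of the redundant-`source` guard above: `source` is only needed when the serializer field name differs from the attribute it maps to (names are illustrative).
from rest_framework import serializers

class ProfileSerializer(serializers.Serializer):             # hypothetical serializer
    # Valid: the field name differs from the attribute it reads.
    display_name = serializers.CharField(source='username')

    # Redundant and rejected by bind():
    # username = serializers.CharField(source='username')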
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply raise `SkipField`, indicating that no value should be set in the validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
get_default_field_names¶
Return the default list of field names that will be used if the `Meta.fields` option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
get_field_names¶
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or `Meta.exclude` options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
get_fields¶
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred too.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
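As the early return shows, explicitly declaring `Meta.validators` replaces the default uniqueness validators entirely. A minimal sketch, reusing the hypothetical `Membership` model from the earlier example:
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator

class ExplicitMembershipSerializer(serializers.ModelSerializer):
    class Meta:
        model = Membership                     # hypothetical model
        fields = "__all__"
        # With this declared, get_validators() returns exactly this list
        # and no validators are derived from the model's constraints.
        validators = [
            UniqueTogetherValidator(
                queryset=Membership._default_manager,
                fields=("user_id", "group_id"),
            )
        ]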
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
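A short usage sketch of the two `is_valid()` styles, using a plain `Serializer` so no database is involved (Django settings are assumed to be configured before DRF is used):
from rest_framework import serializers

class PointSerializer(serializers.Serializer):
    x = serializers.IntegerField()
    y = serializers.IntegerField()

bad = PointSerializer(data={"x": 1, "y": "not-a-number"})
if not bad.is_valid():                  # collects errors instead of raising
    print(bad.errors)                   # e.g. {'y': ['A valid integer is required.']}

good = PointSerializer(data={"x": 1, "y": 2})
good.is_valid(raise_exception=True)     # would raise ValidationError on bad input
print(good.validated_data)              # OrderedDict([('x', 1), ('y', 2)])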
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
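A usage sketch of the `.save(**kwargs)` contract these assertions guard, with a plain `Serializer` whose `create()` just builds a dict (field names are illustrative):
from rest_framework import serializers

class NoteSerializer(serializers.Serializer):
    text = serializers.CharField()

    def create(self, validated_data):
        # Extra keyword arguments passed to save() arrive merged in here.
        return {"id": 1, **validated_data}

serializer = NoteSerializer(data={"text": "hello"})
serializer.is_valid(raise_exception=True)   # must be called before .save()
instance = serializer.save(owner="alice")   # extra attribute merged into validated_data
# instance == {'id': 1, 'text': 'hello', 'owner': 'alice'}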
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
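As the loop shows, a `validate_<field_name>` hook on the serializer runs right after the field's own validation. A minimal sketch (the field name is illustrative):
from rest_framework import serializers

class UsernameSerializer(serializers.Serializer):
    username = serializers.CharField()

    def validate_username(self, value):
        # Invoked by to_internal_value() for the `username` field.
        if " " in value:
            raise serializers.ValidationError("No spaces allowed.")
        return value.lower()

s = UsernameSerializer(data={"username": "Alice"})
s.is_valid(raise_exception=True)
print(s.validated_data)   # OrderedDict([('username', 'alice')])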
to_representation¶
Generate a dictionary representation of the corresponding model instance.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
instance | Model | The Model instance. | None |
Returns:
Type | Description |
---|---|
dict | The dictionary representation of the Model instance. |
View Source
def to_representation(self, instance: Model):
"""
Generate a dictionary representation of the corresponding model instance.
Args:
instance (Model): The Model instance.
Returns:
dict: The dictionary representation of the Model instance.
"""
serializer = self.get_serializer(instance.__class__)
return serializer(instance, context=self.context).data
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
ModelSerializerNoWeightsWithStats¶
class ModelSerializerNoWeightsWithStats(
instance=None,
data=<class 'rest_framework.fields.empty'>,
**kwargs
)
A model serializer that excludes the model weights but includes the model statistics.
View Source
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializerNoWeights
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
get_serializer¶
Get the appropriate serializer based on the model type.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
model | Type[Model] | The model class. | None |
Returns:
Type | Description |
---|---|
Type[serializers.ModelSerializer] | The serializer class for the model. |
View Source
@classmethod
def get_serializer(cls, model: Type[Model]):
"""
Get the appropriate serializer based on the model type.
Args:
model (Type[Model]): The model class.
Returns:
Type[serializers.ModelSerializer]: The serializer class for the model.
"""
if model == GlobalModel:
return GlobalModelSerializer
if model == LocalModel:
return LocalModelSerializer
if model == MeanModel:
return MeanModelSerializer
if model == SWAGModel:
return SWAGModelSerializer
getLogger("fl.server").warning(f"Using fallback model serializer for {model.__name__}")
return ModelFallbackSerializer
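A quick sketch of this dispatch, assuming the serializer classes can be imported from fl_server_api.serializers.model in a configured project:
from fl_server_core.models import GlobalModel, MeanModel
from fl_server_api.serializers.model import (
    GlobalModelSerializer,
    MeanModelSerializer,
    ModelSerializerNoWeightsWithStats,
)

assert ModelSerializerNoWeightsWithStats.get_serializer(GlobalModel) is GlobalModelSerializer
assert ModelSerializerNoWeightsWithStats.get_serializer(MeanModel) is MeanModelSerializer
# Any other Model subclass logs a warning and falls back to ModelFallbackSerializer.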
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
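A minimal sketch of what `many=True` does in practice (plain fields, no database access):
from rest_framework import serializers

class TagSerializer(serializers.Serializer):
    name = serializers.CharField()

tags = TagSerializer(data=[{"name": "a"}, {"name": "b"}], many=True)
# many_init() wrapped a TagSerializer child inside a ListSerializer parent.
print(type(tags).__name__)         # ListSerializer
print(type(tags.child).__name__)   # TagSerializer
tags.is_valid(raise_exception=True)
print(tags.validated_data)         # [OrderedDict([('name', 'a')]), OrderedDict([('name', 'b')])]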
Instance variables¶
context: Returns the context as passed to the root serializer on initialization.
root: Returns the top-level serializer for this field.
Methods¶
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply raise `SkipField`, indicating that no value should be set in the validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
get_default_field_names¶
Return the default list of field names that will be used if the `Meta.fields` option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
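A sketch of the two `Meta` options this method merges; the `Article` model and its fields are hypothetical:
from rest_framework import serializers

class ArticleSerializer(serializers.ModelSerializer):
    class Meta:
        model = Article                        # hypothetical Django model
        fields = ["id", "title", "slug", "created"]
        read_only_fields = ["created"]         # shorthand for {'created': {'read_only': True}}
        extra_kwargs = {"slug": {"required": False}}

# get_extra_kwargs() would return:
#   {'slug': {'required': False}, 'created': {'read_only': True}}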
get_field_names¶
Returns the list of all field names that should be created when instantiating this serializer class. This is based on the default set of fields, but also takes into account the `Meta.fields` or `Meta.exclude` options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
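The `fields`/`exclude` rules enforced above, sketched with a hypothetical `Article` model:
from rest_framework import serializers

class ArticleListSerializer(serializers.ModelSerializer):
    class Meta:
        model = Article              # hypothetical Django model
        fields = ["id", "title"]     # explicit whitelist

class ArticleDetailSerializer(serializers.ModelSerializer):
    class Meta:
        model = Article
        exclude = ["internal_notes"]  # everything except this field

# Declaring both `fields` and `exclude`, or neither, trips the assertions above;
# fields = "__all__" is the explicit way to include every model field.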
get_fields¶
Return the dict of field names -> field instances that should be used for `self.fields` when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
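A sketch of the model option that triggers these validators; the `Post` model and app label are hypothetical:
from django.db import models

class Post(models.Model):
    title = models.CharField(max_length=100, unique_for_date="published")
    published = models.DateField()

    class Meta:
        app_label = "example"   # hypothetical app

# A ModelSerializer over Post that includes `title` gets the equivalent of:
#   UniqueForDateValidator(queryset=Post._default_manager,
#                          field="title", date_field="published")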
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing though here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class, but also on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred too.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
run_validation¶
We override the default `run_validation`, because the validation performed by validators and the `.validate()` method should be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
to_representation¶
Generate a dictionary representation of the corresponding model instance.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
instance | Model | The Model instance. | None |
Returns:
Type | Description |
---|---|
dict | The dictionary representation of the Model instance. |
View Source
def to_representation(self, instance: Model):
"""
Generate a dictionary representation of the corresponding model instance.
Args:
instance (Model): The Model instance.
Returns:
dict: The dictionary representation of the Model instance.
"""
serializer = self.get_serializer(instance.__class__)
return serializer(instance, context=self.context).data
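In effect, serializing through this class dispatches to the serializer that matches the concrete model class and forwards the same context. A hedged sketch, assuming `model_instance` is some persisted Model (e.g. a GlobalModel):
serializer = ModelSerializerNoWeightsWithStats(model_instance)
data = serializer.data
# internally: get_serializer(type(model_instance))(model_instance, context=serializer.context).data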
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# updated instance and we do not want it to collide with .update()
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise `ValidationError`, indicating invalid data.
- Raise `SkipField`, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)
SWAGModelSerializer¶
A `ModelSerializer` is just a regular `Serializer`, except that:
- A set of default fields are automatically populated.
- A set of default validators are automatically populated.
- Default `.create()` and `.update()` implementations are provided.
The process of automatically determining a set of serializer fields based on the model fields is reasonably complex, but you almost certainly don't need to dig into the implementation.
If the `ModelSerializer` class doesn't generate the set of fields that you need, you should either declare the extra/differing fields explicitly on the serializer class, or simply use a `Serializer` class.
Ancestors (in MRO)¶
- fl_server_api.serializers.model.ModelSerializer
- rest_framework.serializers.ModelSerializer
- rest_framework.serializers.Serializer
- rest_framework.serializers.BaseSerializer
- rest_framework.fields.Field
Class variables¶
Static methods¶
many_init¶
This method implements the creation of a `ListSerializer` parent class when `many=True` is used. You can customize it if you need to control which keyword arguments are passed to the parent, and which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent and child classes in order to try to cover the general case. If you're overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
    kwargs['child'] = cls()
    return CustomListSerializer(*args, **kwargs)
View Source
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
allow_empty = kwargs.pop('allow_empty', None)
max_length = kwargs.pop('max_length', None)
min_length = kwargs.pop('min_length', None)
child_serializer = cls(*args, **kwargs)
list_kwargs = {
'child': child_serializer,
}
if allow_empty is not None:
list_kwargs['allow_empty'] = allow_empty
if max_length is not None:
list_kwargs['max_length'] = max_length
if min_length is not None:
list_kwargs['min_length'] = min_length
list_kwargs.update({
key: value for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
})
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
Instance variables¶
context: Returns the context as passed to the root serializer on initialization.
root: Returns the top-level serializer for this field.
Methods¶
analyze_torch_model¶
View Source
def analyze_torch_model(self, instance: Model):
if not isinstance(instance, GlobalModel) or not instance.input_shape:
return None
# try to analyze the model stats
try:
torch_model = instance.get_torch_model()
input_shape: List[int] = [1 if x is None else x for x in instance.input_shape]
stats = summary(torch_model, input_size=input_shape, verbose=0)
return _model_stats_to_dict(stats)
except Exception as e:
getLogger("fl.server").warning(f"Failed to analyze model {instance.id}: {e}")
return None
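A standalone sketch of the torchinfo call this method wraps, including the substitution of `None` (an unknown batch dimension) with 1; the network and input shape are illustrative:
import torch
from torchinfo import summary

model = torch.nn.Sequential(
    torch.nn.Flatten(),
    torch.nn.Linear(28 * 28, 10),
)
input_shape = [None, 1, 28, 28]                        # as stored on a GlobalModel
input_size = [1 if x is None else x for x in input_shape]
stats = summary(model, input_size=input_size, verbose=0)
print(stats.total_params)                              # 7850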
bind¶
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
View Source
def bind(self, field_name, parent):
"""
Initializes the field name and parent for the field instance.
Called when a field is added to the parent serializer instance.
"""
# In order to enforce a consistent style, we error if a redundant
# 'source' argument has been used. For example:
# my_field = serializer.CharField(source='my_field')
assert self.source != field_name, (
"It is redundant to specify `source='%s'` on field '%s' in "
"serializer '%s', because it is the same as the field name. "
"Remove the `source` keyword argument." %
(field_name, self.__class__.__name__, parent.__class__.__name__)
)
self.field_name = field_name
self.parent = parent
# `self.label` should default to being based on the field name.
if self.label is None:
self.label = field_name.replace('_', ' ').capitalize()
# self.source should default to being the same as the field name.
if self.source is None:
self.source = field_name
# self.source_attrs is a list of attributes that need to be looked up
# when serializing the instance, or populating the validated data.
if self.source == '*':
self.source_attrs = []
else:
self.source_attrs = self.source.split('.')
build_field¶
Return a two tuple of (cls, kwargs) to build a serializer field with.
View Source
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
build_nested_field¶
Create nested fields for forward and reverse relationships.
View Source
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
fields = '__all__'
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
build_property_field¶
Create a read only field for model methods and properties.
View Source
build_relational_field¶
Create fields for forward and reverse relationships.
View Source
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
to_field = field_kwargs.pop('to_field', None)
if to_field and not relation_info.reverse and not relation_info.related_model._meta.get_field(to_field).primary_key:
field_kwargs['slug_field'] = to_field
field_class = self.serializer_related_to_field
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
build_standard_field¶
Create regular model fields.
View Source
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
# Special case to handle when a OneToOneField is also the primary key
if model_field.one_to_one and model_field.primary_key:
field_class = self.serializer_related_field
field_kwargs['queryset'] = model_field.related_model.objects
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = {
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
}
for key in list(field_kwargs):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
is_django_jsonfield = hasattr(models, 'JSONField') and isinstance(model_field, models.JSONField)
if (postgres_fields and isinstance(model_field, postgres_fields.JSONField)) or is_django_jsonfield:
# Populate the `encoder` argument of `JSONField` instances generated
# for the model `JSONField`.
field_kwargs['encoder'] = getattr(model_field, 'encoder', None)
if is_django_jsonfield:
field_kwargs['decoder'] = getattr(model_field, 'decoder', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
# for the PostgreSQL specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
build_unknown_field¶
Raise an error on any unknown fields.
View Source
build_url_field¶
Create a field representing the object's own URL.
View Source
create¶
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they cannot be set until the model is instantiated, in which case the implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need to write an explicit `.create()` method.
View Source
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass._default_manager.create(**validated_data)
except TypeError:
tb = traceback.format_exc()
msg = (
'Got a `TypeError` when calling `%s.%s.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.%s.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception was:\n %s' %
(
ModelClass.__name__,
ModelClass._default_manager.name,
ModelClass.__name__,
ModelClass._default_manager.name,
self.__class__.__name__,
tb
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
field = getattr(instance, field_name)
field.set(value)
return instance
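A hedged sketch of the explicit .create() override the docstring recommends (ExampleModel and its tags relation are hypothetical names, not part of this project):
# Hypothetical sketch, not project code.
from rest_framework import serializers
from myapp.models import ExampleModel  # hypothetical app and model

class ExampleSerializer(serializers.ModelSerializer):
    class Meta:
        model = ExampleModel
        fields = "__all__"

    def create(self, validated_data):
        # Many-to-many values are not valid arguments to .create(), so pop
        # them first and set them once the instance exists, mirroring the
        # default implementation above.
        tags = validated_data.pop("tags", [])
        instance = ExampleModel.objects.create(**validated_data)
        instance.tags.set(tags)
        return instance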
fail¶
A helper method that simply raises a validation error.
View Source
def fail(self, key, **kwargs):
"""
A helper method that simply raises a validation error.
"""
try:
msg = self.error_messages[key]
except KeyError:
class_name = self.__class__.__name__
msg = MISSING_ERROR_MESSAGE.format(class_name=class_name, key=key)
raise AssertionError(msg)
message_string = msg.format(**kwargs)
raise ValidationError(message_string, code=key)
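For example (a hypothetical sketch, not project code), a custom field can register its own message in default_error_messages and raise it through self.fail():
# Hypothetical sketch: the key passed to self.fail() is looked up in
# error_messages and formatted with the keyword arguments.
from rest_framework import serializers

class EvenIntegerField(serializers.IntegerField):
    default_error_messages = {"not_even": "Expected an even number, got {value}."}

    def to_internal_value(self, data):
        value = super().to_internal_value(data)
        if value % 2:
            self.fail("not_even", value=value)
        return value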
fields¶
A dictionary of {field_name: field_instance}.
get_attribute¶
Given the outgoing object instance, return the primitive value
that should be used for this field.
View Source
def get_attribute(self, instance):
"""
Given the *outgoing* object instance, return the primitive value
that should be used for this field.
"""
try:
return get_attribute(instance, self.source_attrs)
except BuiltinSignatureError as exc:
msg = (
'Field source for `{serializer}.{field}` maps to a built-in '
'function type and is invalid. Define a property or method on '
'the `{instance}` instance that wraps the call to the built-in '
'function.'.format(
serializer=self.parent.__class__.__name__,
field=self.field_name,
instance=instance.__class__.__name__,
)
)
raise type(exc)(msg)
except (KeyError, AttributeError) as exc:
if self.default is not empty:
return self.get_default()
if self.allow_null:
return None
if not self.required:
raise SkipField()
msg = (
'Got {exc_type} when attempting to get a value for field '
'`{field}` on serializer `{serializer}`.\nThe serializer '
'field might be named incorrectly and not match '
'any attribute or key on the `{instance}` instance.\n'
'Original exception text was: {exc}.'.format(
exc_type=type(exc).__name__,
field=self.field_name,
serializer=self.parent.__class__.__name__,
instance=instance.__class__.__name__,
exc=exc
)
)
raise type(exc)(msg)
get_default¶
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise SkipField, indicating that no value should be set in the
validated data for this field.
View Source
def get_default(self):
"""
Return the default value to use when validating data if no input
is provided for this field.
If a default has not been set for this field then this will simply
raise `SkipField`, indicating that no value should be set in the
validated data for this field.
"""
if self.default is empty or getattr(self.root, 'partial', False):
# No default, or this is a partial update.
raise SkipField()
if callable(self.default):
if getattr(self.default, 'requires_context', False):
return self.default(self)
else:
return self.default()
return self.default
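A hedged sketch of a callable default that opts into the requires_context branch above (the CurrentUsernameDefault class is hypothetical):
# Hypothetical sketch: because requires_context is True, get_default()
# calls the default with the serializer field, giving it access to the
# serializer context.
from rest_framework import serializers

class CurrentUsernameDefault:
    requires_context = True

    def __call__(self, serializer_field):
        return serializer_field.context["request"].user.username

class CommentSerializer(serializers.Serializer):
    text = serializers.CharField()
    author = serializers.CharField(default=CurrentUsernameDefault())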
get_default_field_names¶
Return the default list of field names that will be used if the
Meta.fields option is not specified.
View Source
get_extra_kwargs¶
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
View Source
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = copy.deepcopy(getattr(self.Meta, 'extra_kwargs', {}))
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
if not isinstance(read_only_fields, (list, tuple)):
raise TypeError(
'The `read_only_fields` option must be a list or tuple. '
'Got %s.' % type(read_only_fields).__name__
)
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
else:
# Guard against the possible misspelling `readonly_fields` (used
# by the Django admin and others).
assert not hasattr(self.Meta, 'readonly_fields'), (
'Serializer `%s.%s` has field `readonly_fields`; '
'the correct spelling for the option is `read_only_fields`.' %
(self.__class__.__module__, self.__class__.__name__)
)
return extra_kwargs
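Both sources of extra keyword arguments handled here can be declared on the Meta class, for example (hypothetical sketch; ExampleModel is not part of this project):
# Hypothetical sketch: read_only_fields entries are folded into
# extra_kwargs as {'read_only': True} by get_extra_kwargs().
from rest_framework import serializers
from myapp.models import ExampleModel  # hypothetical

class ExampleSerializer(serializers.ModelSerializer):
    class Meta:
        model = ExampleModel
        fields = ["id", "name", "owner", "created_at"]
        read_only_fields = ["created_at"]
        extra_kwargs = {"owner": {"write_only": True}}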
get_field_names¶
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the Meta.fields or
Meta.exclude options if they have been specified.
View Source
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and fields != ALL_FIELDS and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple or "__all__". '
'Got %s.' % type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
assert not (fields is None and exclude is None), (
"Creating a ModelSerializer without either the 'fields' attribute "
"or the 'exclude' attribute has been deprecated since 3.3.0, "
"and is now disallowed. Add an explicit fields = '__all__' to the "
"{serializer_class} serializer.".format(
serializer_class=self.__class__.__name__
),
)
if fields == ALL_FIELDS:
fields = None
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
# Do not require any fields that are declared in a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name not in self._declared_fields, (
"Cannot both declare the field '{field_name}' and include "
"it in the {serializer_class} 'exclude' option. Remove the "
"field or, if inherited from a parent serializer, disable "
"with `{field_name} = None`."
.format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
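The two mutually exclusive Meta options this method accepts look like this in practice (hypothetical sketch; ExampleModel and internal_notes are illustrative names):
# Hypothetical sketch: either list fields explicitly (or use '__all__'),
# or exclude a set of model fields -- never both.
from rest_framework import serializers
from myapp.models import ExampleModel  # hypothetical

class NarrowSerializer(serializers.ModelSerializer):
    class Meta:
        model = ExampleModel
        fields = ["id", "name"]            # explicit allow-list

class WideSerializer(serializers.ModelSerializer):
    class Meta:
        model = ExampleModel
        exclude = ["internal_notes"]       # everything except these fields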
get_fields¶
Return the dict of field names -> field instances that should be
used for self.fields when instantiating the serializer.
View Source
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
if self.url_field_name is None:
self.url_field_name = api_settings.URL_FIELD_NAME
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
extra_field_kwargs = extra_kwargs.get(field_name, {})
source = extra_field_kwargs.get('source', '*')
if source == '*':
source = field_name
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
source, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
get_initial¶
Return a value to use when the field is being returned as a primitive
value, without any object instance.
View Source
def get_initial(self):
if hasattr(self, 'initial_data'):
# initial_data may not be a valid type
if not isinstance(self.initial_data, Mapping):
return OrderedDict()
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
get_unique_for_date_validators¶
Determine a default set of validators for the following constraints:
- unique_for_date
- unique_for_month
- unique_for_year
View Source
def get_unique_for_date_validators(self):
"""
Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
get_unique_together_validators¶
Determine a default set of validators for any unique_together constraints.
View Source
def get_unique_together_validators(self):
"""
Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents)
)
# The field names we're passing through here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_sources = OrderedDict(
(field.field_name, field.source) for field in self._writable_fields
if (field.source != '*') and ('.' not in field.source)
)
# Special Case: Add read_only fields with defaults.
field_sources.update(OrderedDict(
(field.field_name, field.source) for field in self.fields.values()
if (field.read_only) and (field.default != empty) and (field.source != '*') and ('.' not in field.source)
))
# Invert so we can find the serializer field names that correspond to
# the model field names in the unique_together sets. This also allows
# us to check that multiple fields don't map to the same source.
source_map = defaultdict(list)
for name, source in field_sources.items():
source_map[source].append(name)
# Note that we make sure to check `unique_together` both on the
# base model class and on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
# Skip if serializer does not map to all unique together sources
if not set(source_map).issuperset(unique_together):
continue
for source in unique_together:
assert len(source_map[source]) == 1, (
"Unable to create `UniqueTogetherValidator` for "
"`{model}.{field}` as `{serializer}` has multiple "
"fields ({fields}) that map to this model field. "
"Either remove the extra fields, or override "
"`Meta.validators` with a `UniqueTogetherValidator` "
"using the desired field names."
.format(
model=self.Meta.model.__name__,
serializer=self.__class__.__name__,
field=source,
fields=', '.join(source_map[source]),
)
)
field_names = tuple(source_map[f][0] for f in unique_together)
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=field_names
)
validators.append(validator)
return validators
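When the automatic validator cannot be built, for example because several serializer fields map to one source, Meta.validators can be declared explicitly, as the assertion message suggests (hypothetical sketch; Booking is not a project model):
# Hypothetical sketch: declaring Meta.validators explicitly replaces the
# automatically generated unique-together validators.
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from myapp.models import Booking  # hypothetical model with unique_together

class BookingSerializer(serializers.ModelSerializer):
    class Meta:
        model = Booking
        fields = ["room", "date"]
        validators = [
            UniqueTogetherValidator(
                queryset=Booking.objects.all(),
                fields=["room", "date"],
            )
        ]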
get_uniqueness_extra_kwargs¶
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
View Source
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
if getattr(self.Meta, 'validators', None) is not None:
return (extra_kwargs, {})
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= {model_field.unique_for_date, model_field.unique_for_month,
model_field.unique_for_year}
unique_constraint_names -= {None}
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(unique_together_list):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
# Get the model field that is referred to.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
# The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
value.update(extra_kwargs[key])
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
get_validators¶
Determine the set of validators to use when instantiating serializer.
View Source
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return list(validators)
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
get_value¶
Given the incoming primitive data, return the value for this field
that should be validated and transformed to a native value.
View Source
include_extra_kwargs¶
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
View Source
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'min_length',
'max_length', 'min_value', 'max_value', 'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
is_valid¶
View Source
def is_valid(self, *, raise_exception=False):
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
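A short usage sketch of the is_valid() flow (ExampleSerializer stands in for any serializer; hypothetical):
# Hypothetical usage sketch.
serializer = ExampleSerializer(data={"name": "demo"})
if serializer.is_valid():
    instance = serializer.save()
else:
    print(serializer.errors)

# Alternatively, raise the collected errors as a ValidationError, which a
# DRF view renders as an HTTP 400 response:
serializer = ExampleSerializer(data={"name": "demo"})
serializer.is_valid(raise_exception=True)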
run_validation¶
We override the default run_validation, because the validation
performed by validators and the .validate() method should
be coerced into an error dictionary with a 'non_fields_error' key.
View Source
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
be coerced into an error dictionary with a 'non_fields_error' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=as_serializer_error(exc))
return value
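An object-level validate() hook whose errors end up in the non-field error dictionary built here (hypothetical sketch):
# Hypothetical sketch: run_validation() calls validate() after the
# per-field validation and wraps any ValidationError it raises.
from rest_framework import serializers

class RangeSerializer(serializers.Serializer):
    start = serializers.IntegerField()
    end = serializers.IntegerField()

    def validate(self, attrs):
        if attrs["start"] > attrs["end"]:
            raise serializers.ValidationError("start must not exceed end")
        return attrs  # validate() must return the validated data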
run_validators¶
Add read_only fields with defaults to value before running validators.
View Source
save¶
View Source
def save(self, **kwargs):
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
assert not hasattr(self, '_data'), (
"You cannot call `.save()` after accessing `serializer.data`."
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
)
validated_data = {**self.validated_data, **kwargs}
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
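A usage sketch of the extra keyword arguments accepted by save() (hypothetical view code; request is assumed to be a DRF request):
# Hypothetical sketch: keyword arguments passed to save() are merged into
# validated_data before create()/update() is called.
serializer = ExampleSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
instance = serializer.save(owner=request.user)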
to_internal_value¶
Dict of native values <- Dict of primitive datatypes.
View Source
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, Mapping):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
}, code='invalid')
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = get_error_detail(exc)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
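The validate_<field_name> hook looked up here can be declared per field (hypothetical sketch):
# Hypothetical sketch: to_internal_value() applies validate_username()
# after the field's own run_validation().
from rest_framework import serializers

class SignupSerializer(serializers.Serializer):
    username = serializers.CharField()

    def validate_username(self, value):
        if " " in value:
            raise serializers.ValidationError("Usernames may not contain spaces.")
        return value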
to_representation¶
View Source
def to_representation(self, instance):
# only return the whole model (weights) if requested
data = serializers.ModelSerializer.to_representation(self, instance)
if not self.context.get("with-weights", False):
del data["weights"]
if self.context.get("with-stats", False):
data["stats"] = self.analyze_torch_model(instance)
if isinstance(instance, GlobalModel):
data["has_preprocessing"] = bool(instance.preprocessing)
return data
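A usage sketch of the context flags consulted above (hypothetical caller code; model_instance stands for any stored model record):
# Hypothetical sketch: the "with-weights" and "with-stats" context keys
# control whether the raw weights are included and whether torchinfo
# statistics are computed.
serializer = GlobalModelSerializer(
    model_instance,
    context={"with-weights": False, "with-stats": True},
)
payload = serializer.data  # no "weights" key; "stats" is set if analysis succeeded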
update¶
View Source
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
info = model_meta.get_field_info(instance)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
m2m_fields = []
for attr, value in validated_data.items():
if attr in info.relations and info.relations[attr].to_many:
m2m_fields.append((attr, value))
else:
setattr(instance, attr, value)
instance.save()
# Note that many-to-many fields are set after updating instance.
# Setting m2m fields triggers signals which could potentially change
# the updated instance and we do not want it to collide with .update().
for attr, value in m2m_fields:
field = getattr(instance, attr)
field.set(value)
return instance
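update() is reached by constructing the serializer with an existing instance; a short usage sketch (hypothetical):
# Hypothetical sketch: passing an instance makes save() dispatch to
# update(); partial=True allows omitting required fields.
serializer = ExampleSerializer(instance, data={"name": "renamed"}, partial=True)
serializer.is_valid(raise_exception=True)
instance = serializer.save()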
validate¶
validate_empty_values¶
Validate empty values, and either:
- Raise ValidationError, indicating invalid data.
- Raise SkipField, indicating that the field should be ignored.
- Return (True, data), indicating an empty value that should be returned without any further validation being applied.
- Return (False, data), indicating a non-empty value, that should have validation applied as normal.
View Source
def validate_empty_values(self, data):
"""
Validate empty values, and either:
* Raise `ValidationError`, indicating invalid data.
* Raise `SkipField`, indicating that the field should be ignored.
* Return (True, data), indicating an empty value that should be
returned without any further validation being applied.
* Return (False, data), indicating a non-empty value, that should
have validation applied as normal.
"""
if self.read_only:
return (True, self.get_default())
if data is empty:
if getattr(self.root, 'partial', False):
raise SkipField()
if self.required:
self.fail('required')
return (True, self.get_default())
if data is None:
if not self.allow_null:
self.fail('null')
# Nullable `source='*'` fields should not be skipped when its named
# field is given a null value. This is because `source='*'` means
# the field is passed the entire object, which is not null.
elif self.source == '*':
return (False, None)
return (True, None)
return (False, data)