How to extend SerializerMetaclass from DRF and apply it to my serializer? - python

Using Python 3.6
In Django Rest framework:
@six.add_metaclass(SerializerMetaclass)
class Serializer(BaseSerializer):
...
class ModelSerializer(Serializer):
I want to extend the SerializerMetaclass:
class JSONDataMetaclass(SerializerMetaclass):
    """Meta class to dynamically add SerializerMethodField and look up JSON data"""

    def __new__(cls, name, bases, attrs):
        # Each entry of Meta.json_fields is a tuple: (field_name,) or
        # (field_name, json_key) — presumably; confirm against callers.
        for field in getattr(attrs.get('Meta'), 'json_fields', []):
            def closure(field):
                # Re-binds `field` per iteration so each generated getter keeps
                # its own tuple (avoids the late-binding-closure trap).
                def _(self, obj):
                    # One-element tuple: field name doubles as the JSON key;
                    # two elements: second item is the raw_data key to read.
                    if len(field) == 1:
                        return obj.raw_data.get(field[0])
                    return obj.raw_data.get(field[1])
                return _
            attrs[field[0]] = serializers.SerializerMethodField()
            attrs['get_{}'.format(field[0])] = closure(field)
        return super(JSONDataMetaclass, cls).__new__(cls, name, bases, attrs)
So that my serializer can use my metaclass
In my customer serializer:
@six.add_metaclass(JSONDataMetaclass)
class MySerializer(ModelSerializer):
...
But this has caused some strange behaviors, eg. it doesn't respect the depth defined and serialize all the way down to nested relationships.
What am I doing wrong?

Related

Django rest framework - set default serializer for a class

In my code, I have a few models with multiple custom properties:
@dataclass
class Value:
amount: float
currency: str
class MyModel(models.Model):
...
@property
def v1(self) -> Value:
...
@property
def v2(self) -> Value:
...
@property
def v3(self) -> Value:
...
And I have the following serializer:
class MyModelBaseSerializer(serializers.ModelSerializer):
    """Plain ModelSerializer; v1-v3 are Value-typed properties on MyModel."""
    class Meta:
        model = MyModel
        # The leading ... stands in for the model's ordinary fields.
        fields = [..., "v1", "v2", "v3"]
When serializing any MyModel instance, a TypeError raises: TypeError: Object of type Value is not JSON serializable.
I know this can be solved by explicitly adding v1, v2, v3 as fields inside MyModelBaseSerializer, but I don't want to do that: I have many models with Value properties. I want the default DRF serializer to know how to serialize Value instances.
I tried overriding to_representation, but that didn't seem to work.
I want something similar to overriding the JSONEncoder.default(o) method, but I don't see how to tell DRF which encoder to use.
Add a custom serializer for Value. For example (not tested).
class ValueSerializer(serializers.Serializer):
    """Serializer mirroring the Value dataclass (amount + currency)."""
    amount = serializers.FloatField()
    currency = serializers.CharField()
class MyModelBaseSerializer(serializers.ModelSerializer):
    # Declare the Value-typed properties explicitly so DRF renders them with
    # ValueSerializer instead of failing to JSON-encode Value instances.
    v1 = ValueSerializer()
    v2 = ValueSerializer()
    v3 = ValueSerializer()
    ...
Or, since Value is a dataclass, take a look at https://github.com/oxan/djangorestframework-dataclasses.
EDIT
A quick and dirty DRY approach can be to override build_property_field on the serializer (see docs).
class MyModelBaseSerializer(serializers.ModelSerializer):
    """Model serializer that knows how to render Value-typed properties."""

    def build_property_field(self, field_name, model_class):
        """Return ``(field_class, field_kwargs)`` for a model property.

        Value-typed properties are rendered with ValueSerializer instead of
        DRF's default ReadOnlyField. A property has no setter, so the nested
        serializer is marked read-only to keep write operations working.
        """
        if field_name in ["v1", "v2", "v3"]:
            return ValueSerializer, {"read_only": True}
        return super().build_property_field(field_name, model_class)
Overriding serializer_field_mapping is another option.

Django: Casting Abstract Models

I have an abstract Django model that I use to create two other models. How can I avoid duplicating code when dealing with the different examples below (for example, when creating a Boxed cereal and a Bowled cereal I would like to avoid duplicating the function twice.
class Cereal(models.Model):
    """Abstract base: concrete cereals share only a name."""
    # NOTE(review): CharField normally requires max_length — confirm this is
    # just elided in the post.
    name = models.CharField()

    class Meta:
        abstract = True


class Boxed(Cereal):
    pass


class Bowled(Cereal):
    pass
def some_func_boxed(name):
boxed = Boxed.objects.get(id=1)
boxed.name = name
boxed.save()
def some_func_bowled(name):
bowled = Bowled.objects.get(id=1)
bowled.name = name
bowled.save()
def some_func(name, Instance):
    """Rename the record with pk 1 of whichever model class is passed in.

    `Instance` is any model class with a default manager (Boxed, Bowled, ...).
    """
    i = Instance.objects.get(id=1)
    # Fixed: the original assigned the literal "some name", silently ignoring
    # the `name` parameter.
    i.name = name
    i.save()
A good idea is to use the strategy pattern; see this article for example: https://medium.com/@sheikhsajid/design-patterns-in-python-part-1-the-strategy-pattern-54b24897233e
You can add this as a @classmethod on the Cereal model:
class Cereal(models.Model):
    """Abstract base holding the fields and behaviour shared by all cereals."""
    # NOTE(review): CharField normally requires max_length — elided in the post.
    name = models.CharField()

    @classmethod
    def some_func_cereal(cls, name):
        """Rename the pk=1 record of whichever concrete model calls this.

        `cls` is the calling subclass (Boxed, Bowled, ...) — the @classmethod
        decorator was garbled to `#classmethod` in the scraped post; without
        it the method would not work at all.
        """
        bowled = cls.objects.get(id=1)
        bowled.name = name
        bowled.save()

    class Meta:
        abstract = True
You can then call this method with:
Boxed.some_func_cereal('Captain Crunch')
Bowled.some_func_cereal('Lucky Charms')
The class with which you call the class method, is passed as the cls parameter in the some_func_cereal function.
You could add an update method to your Cereal absract class such as:
class Cereal:
    """Abstract cereal behaviour shared by Boxed and Bowled."""

    def update(self, **kwargs):
        """Assign each given attribute, persist only those columns, return self.

        Passing ``update_fields`` limits the UPDATE statement to exactly the
        columns supplied, and returning ``self`` allows call chaining.
        """
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
        self.save(update_fields=kwargs.keys())
        return self
and use it as follows to update any column you like
BoxedInstance.update(name="new name")
or
BoxedInstance.update(name="new name", taste="cardboardy")

Django: How do i filter to return only those candidates that have paid >=10000

Here is my model.py
class Candidate(models.Model):
    """A person standing for election.

    The three computed attributes below are properties — the @property
    decorators were garbled to `#classmethod`-style `#property` comments in
    the scraped post; without them L145's `self.amount_paid >= 10000` would
    compare a bound method, not a number.
    """
    person = models.OneToOneField(
        Person, related_name='person_candidate', on_delete=models.PROTECT)

    def __str__(self):
        return str(self.person)

    @property
    def total_candidate_votes(self):
        # Count of vote rows linked to this candidate.
        return self.candidate_votes.filter(candidate=self).count()

    @property
    def amount_paid(self):
        # Sum of payment fees; 0 when no payments exist yet.
        return self.candidate_payments.aggregate(models.Sum('fees'))['fees__sum'] or 0

    @property
    def is_qualified_to_vie(self):
        # Candidates qualify once payments reach 10 000.
        return self.amount_paid >= 10000
Help me create a filter that will show candidates who have only paid >=10000
Filter.py
class CandidateFilter(django_filters.FilterSet):
    """Filter set for Candidate, as posted in the question (incomplete)."""
    # NOTE(review): django-filter renamed the `name=` kwarg to `field_name=`
    # in 2.x — confirm the installed version.
    name = django_filters.CharFilter(lookup_expr='iexact', name='person__first_name')
    is_qualified_to_vie = django_filters.BooleanFilter(method='filter_by_qualified_candidates')

    def filter_by_qualified_candidates(self, queryset, field, value):
        # As posted this returns the bound `filter` method itself (never
        # called) — the body is exactly what the question asks how to write.
        return queryset.filter
The problem is that python properties can't be translated into django filter expressions. I'd recommend using custom queryset methods in addition to the python properties here. Something like the below:
class CandidateQuerySet(models.QuerySet):
    """QuerySet helpers that mirror Candidate's python properties in SQL."""

    def annotate_amount_paid(self):
        """Annotate each candidate with the total fees they have paid."""
        return self.annotate(amount_paid=models.Sum('candidate_payments__fees'))

    def qualified_to_vie(self, yes=True):
        """Candidates whose payments reach 10 000 (or fall short when yes=False).

        eg, Candidate.objects.qualified_to_vie()
        """
        # Fixed a syntax error in the posted answer: `qs = return self...`.
        qs = self.annotate_amount_paid()
        if yes:
            return qs.filter(amount_paid__gte=10000)
        return qs.filter(amount_paid__lt=10000)
class Candidate(models.Model):
...
objects = CandidateQuerySet.as_manager()
From here, it's fairly straightforward.
class CandidateFilter(django_filters.FilterSet):
    """Filter set exposing the custom queryset method as a boolean filter.

    Uses `django_filters` consistently — the posted answer mixed
    `filters.FilterSet` with `django_filters.BooleanFilter`.
    """
    is_qualified_to_vie = django_filters.BooleanFilter(method='filter_by_qualified_candidates')

    def filter_by_qualified_candidates(self, queryset, name, value):
        # Delegate to CandidateQuerySet.qualified_to_vie so True/False filter
        # values map to qualified/unqualified candidates.
        return queryset.qualified_to_vie(value)
Note that the above is just the gist of the idea and will probably require some changes in order to actually function.

Django override model subclass create method

I'm looking for right ways to override the create() method of a subclass.
Basically I'm doing this:
class Base(models.Model):
    # Question code: overriding __init__ on a Django model like this is
    # unsupported — Model.__init__ must receive the field kwargs, which is
    # exactly the problem the question asks about.
    field1_base = models.IntegerField()

    def __init__(self, field1_base):
        # LOGICS
        self.field1_base = field1_base


class A(Base):
    field2_sub = models.IntegerField()

    def __init__(self, field2_sub, field1_base):
        # LOGICS
        self.field2_sub = field2_sub
        super(A, self).__init__(field1_base)


A(field2_sub=1, field1_base=2)
However we can't override the __init__() method of a model.
I just want to leave some fields to be assigned in base class's methods.
Is there a proper way to do this using create() method?
Of course I can custom create method of Base class, however what I want is to invoke Base.create and A.create at the same time on the creation of A, which makes the situation different from this question
I would do something like this.
class Base(models.Model):
    """Base model whose create() funnels per-class setup through initialize()."""
    field1_base = models.IntegerField()

    def initialize(self, *args, **kwargs):
        # Subclasses override this (calling super) to assign their own fields.
        self.field1_base = kwargs['field1_base']

    @classmethod
    def create(cls, *args, **kwargs):
        """Build an unsaved instance, letting every class in the MRO initialize it.

        The @classmethod decorator was garbled to `#classmethod` in the
        scraped post; without it `cls()` would not receive the class.
        """
        # LOGICS
        self = cls()
        self.initialize(*args, **kwargs)
        return self
class A(Base):
    """Concrete model adding field2_sub on top of Base."""
    field2_sub = models.IntegerField()

    def initialize(self, *args, **kwargs):
        super(A, self).initialize(*args, **kwargs)
        # Fixed copy-paste bug in the posted answer: field2_sub was assigned
        # kwargs['field1_base'].
        self.field2_sub = kwargs['field2_sub']
A.create(field2_sub=1, field1_base=2)

Dealing with metaclass conflict with SQL Alchemy declarative base

I have a class X which derives from a class with its own metaclass Meta. I want to also derive X from the declarative base in SQL Alchemy. But I can't do the simple
class MyBase(metaclass = Meta):
    #...
class X(declarative_base(), MyBase):
    #...
since I would get metaclass conflict error: 'the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases'. I understand that I need to create a new metaclass that would derive from both Meta and from whatever metaclass the declarative base uses (DeclarativeMeta I think?). So is it enough to write:
class NewMeta(Meta, DeclarativeMeta): pass
class MyBase(metaclass = NewMeta):
    #...
class X(declarative_base(), MyBase):
    #...
I tried this, and it seems to work; but I'm afraid I may have introduced some problem with this code.
I read the manual, but it's a bit too cryptic for me. What's the correct way to do this?
EDIT:
The code used for my classes is as follows:
class IterRegistry(type):
    """Metaclass giving each class its own instance registry plus iteration."""

    def __new__(cls, name, bases, attr):
        # Every class built with this metaclass gets a fresh registry and
        # starts unfrozen (new instances allowed).
        attr['_registry'] = {}
        attr['_frozen'] = False
        # Debug prints left in by the question's author.
        print(name, bases)
        print(type(cls))
        # NOTE(review): calls type.__new__ directly instead of super().__new__;
        # works in isolation but skips cooperating metaclasses when combined
        # (e.g. with DeclarativeMeta), as the answer in this thread points out.
        return type.__new__(cls, name, bases, attr)

    def __iter__(cls):
        # Iterating the *class* yields its registered instances.
        return iter(cls._registry.values())
# Combined metaclass so a class can inherit from both the declarative Base
# (metaclass DeclarativeMeta) and EnumType (metaclass IterRegistry) without a
# metaclass-conflict error.
class SQLEnumMeta(IterRegistry, DeclarativeMeta): pass
class EnumType(metaclass = IterRegistry):
    """Flyweight enum: at most one instance per token, kept in `_registry`.

    `_registry` and `_frozen` are injected per-class by the IterRegistry
    metaclass. The two @classmethod decorators below were garbled to
    `#classmethod` comments in the scraped post; they are restored here.
    """

    def __init__(self, token):
        # __new__ may return an already-initialised instance; skip re-init.
        if hasattr(self, 'token'):
            return
        self.token = token
        self.id = len(type(self)._registry)
        type(self)._registry[token] = self

    def __new__(cls, token):
        if token in cls._registry:
            return cls._registry[token]
        else:
            if cls._frozen:
                raise TypeError('No more instances allowed')
            else:
                return object.__new__(cls)

    @classmethod
    def freeze(cls):
        """Disallow creation of any further instances of this class."""
        cls._frozen = True

    def __repr__(self):
        return self.token

    @classmethod
    def instance(cls, token):
        """Look up an existing instance by token (KeyError if unknown)."""
        return cls._registry[token]
class C1(Base, EnumType, metaclass = SQLEnumMeta):
__tablename__ = 'c1'
#...
Edit: Now having looked at IterRegistry and DeclarativeMeta, I think your code is okay.
IterRegistry defines __new__ and __iter__, while DeclarativeMeta defines __init__ and __setattr__. Since there is no overlap, there's no direct need to call super. Nevertheless, it would be good to do so, to future-proof your code.
Do you have control over the definition of Meta? Can you show us its definition? I don't think we can say it works or does not work unless we see the definition of Meta.
For example, there is a potential problem if your Meta does not call
super(Meta,cls).__init__(classname, bases, dict_)
If you run this code
class DeclarativeMeta(type):
    """Minimal stand-in for SQLAlchemy's DeclarativeMeta used to trace calls."""
    def __init__(cls, classname, bases, dict_):
        print('DeclarativeMeta')
        # Commented-out sketch of what the real DeclarativeMeta does:
        # if '_decl_class_registry' in cls.__dict__:
        # return type.__init__(cls, classname, bases, dict_)
        # _as_declarative(cls, classname, dict_)
        return type.__init__(cls, classname, bases, dict_)
class Meta(type):
    # Non-cooperative version: calls type.__init__ directly, so when combined
    # via NewMeta(Meta, DeclarativeMeta) it skips DeclarativeMeta.__init__ —
    # this is the pitfall being demonstrated.
    def __init__(cls, classname, bases, dict_):
        print('Meta')
        return type.__init__(cls, classname, bases, dict_)
class NewMeta(Meta,DeclarativeMeta): pass

class MyBase(object):
    # NOTE(review): `__metaclass__` is Python 2 syntax; Python 3 ignores it
    # (neither metaclass would run) — use `class MyBase(metaclass=NewMeta)` there.
    __metaclass__ = NewMeta
    pass
Then only the string 'Meta' gets printed.
In other words, only Meta.__init__ gets run. DeclarativeMeta.__init__ gets skipped.
On the other hand, if you define
class Meta(type):
    # Cooperative version: super().__init__ follows the MRO, so
    # DeclarativeMeta.__init__ also runs when both appear under NewMeta.
    def __init__(cls, classname, bases, dict_):
        print('Meta')
        return super(Meta,cls).__init__(classname, bases, dict_)
Then both Meta.__init__ and DeclarativeMeta.__init__ gets run.

Categories