I'm interested if there is any way to set an attribute on the Pydantic Model class that will only be used when constructing the output representation of the object.
Something that works like Django Rest Framework SerializerMethodField. It is highly useful when you need to construct output depending on the environment or many other conditions.
It's possible to have a mechanism similar to Django REST Framework's SerializerMethodField by overriding the pydantic BaseModel class:
from copy import deepcopy
from pydantic import BaseModel, Field
class Person:
    """Plain (non-pydantic) object acting as the ORM-style data source."""

    def __init__(self, name, likes_cake):
        # Just stash the two values; pydantic reads them via from_orm later.
        self.name, self.likes_cake = name, likes_cake
class CustomPydanticModel(BaseModel):
    """BaseModel variant emulating DRF's SerializerMethodField.

    Before delegating to pydantic's ``from_orm``, every declared field is
    given a chance to be computed by a ``get_<field>`` method on the class
    (or by an explicit override supplied via ``getter_binding``).
    """

    @classmethod  # was "#classmethod" — markdown-mangled decorator
    def from_orm(cls, obj, getter_binding=None):
        """Build the model from ``obj``, applying per-field getter methods.

        Args:
            obj: source object; deep-copied so the caller's object is
                never mutated by the setattr calls below.
            getter_binding: optional {field_name: callable} mapping that
                takes precedence over the ``get_<field>`` class lookup.
        """
        getter_binding = getter_binding or {}
        obj = deepcopy(obj)  # protect the caller's object from mutation
        for field in cls.__fields__:
            method = getter_binding.get(field)
            if method is None:
                # Fall back to a conventionally named get_<field> method.
                method = getattr(cls, f"get_{field}", None)
            if method is not None and callable(method):
                setattr(obj, field, method(obj))
        return super().from_orm(obj)
class PersonModel(CustomPydanticModel):
    """Output model: ``status`` is derived, not read from the source object."""

    name: str
    status: str | None = None

    @staticmethod  # was "#staticmethod" — markdown-mangled decorator
    def get_status(obj):
        # Invoked by CustomPydanticModel.from_orm with the source object.
        return "I like cake" if obj.likes_cake else "Hello"

    class Config:
        orm_mode = True  # required for from_orm in pydantic v1
# Demo: likes_cake=True on the source object yields status="I like cake".
obj = Person("Patrick", True)
pydantic_obj = PersonModel.from_orm(obj)
Note that the get_status method should be a staticmethod or a classmethod.
Related
I want to make an attribute private but with a pydantic field:
from pydantic import BaseModel, Field, PrivateAttr, validator
class A(BaseModel):
    _a: str = ""  # I want a pydantic field for this private value.
    _computed_from_a: str = PrivateAttr(default="")

    @property  # was "#property" — markdown-mangled decorator
    def a(self):
        return self._a

    @a.setter  # was "#a.setter" — markdown-mangled decorator
    def a(self, v):
        # Setting a also refreshes the derived private value.
        self._a = v
        self._computed_from_a = "b" + self._a

# This returns a type Field<...> which crashes...
assert isinstance(A().a, str)
I think I cannot name a field with a leading underscore because pydantic does black magic underneath. How can I achieve a private field _a which has defined setters and getters, where the setter computes some other value _computed_from_a?
I am trying to change the alias_generator and the allow_population_by_field_name properties of the Config class of a Pydantic model during runtime. I am expecting it to cascade from the parent model to the child models.
# Config (alias_generator, allow_population_by_field_name) is declared once
# on the parent and inherited by both child models.
class ParentModel(BaseModel):
    class Config:
        alias_generator = to_camel  # generate camelCase aliases per field
        allow_population_by_field_name = True  # still accept snake_case names

class ChildModel(ParentModel):
    first_name: str

class ChildModel2(ParentModel):
    data: ChildModel  # nested model — aliasing cascades through it
What I want to do is to change the following during runtime.
alias_generator = to_pascal
How would I be able to do that?
I tried something like this:
class ParentModel(BaseModel):
    def __init__(self, is_camel=True, **data):
        # NOTE(review): pydantic v1 computes aliases at class-creation time,
        # so mutating Config at runtime affects every model globally and does
        # not rebuild existing field aliases — confirm the intended approach.
        self.Config.alias_generator = to_camel if is_camel else to_pascal
        super().__init__(**data)  # was missing: fields were never initialised

    class Config:
        allow_population_by_field_name = True

class ChildModel(ParentModel):
    first_name: str

class ChildModel2(ParentModel):
    def __init__(self, is_camel, **data):
        # was "super.__init__(...)" — missing call parens raises a TypeError
        super().__init__(is_camel, **data)

    data: ChildModel

test = ChildModel2(is_camel=False, data=ChildModel(first_name="test"))
So the goal is to convert the pydantic models to json in either camel case or pascal case or snake case when the fast api returns the response.
I have an abstract Django model that I use to create two other models. How can I avoid duplicating code when dealing with the different examples below (for example, when creating a Boxed cereal and a Bowled cereal I would like to avoid duplicating the function twice)?
class Cereal(models.Model):
    # NOTE(review): CharField normally requires max_length — confirm upstream.
    name = models.CharField()

    class Meta:
        abstract = True  # no table for the base; each subclass gets its own

class Boxed(Cereal):
    pass

class Bowled(Cereal):
    pass
def some_func_boxed(name):  # was "func" — not Python; must be "def"
    """Rename Boxed row #1 to ``name``."""
    boxed = Boxed.objects.get(id=1)
    boxed.name = name
    boxed.save()

def some_func_bowled(name):  # was "func" — not Python; must be "def"
    """Rename Bowled row #1 — duplicates some_func_boxed for a second model."""
    bowled = Bowled.objects.get(id=1)
    bowled.name = name
    bowled.save()
def some_func(name, Instance):
    """Generic version: rename row #1 of any model class passed as ``Instance``."""
    i = Instance.objects.get(id=1)
    i.name = name  # was hard-coded "some name", silently ignoring the parameter
    i.save()
A good idea is to use the strategy pattern; see this article for example: https://medium.com/@sheikhsajid/design-patterns-in-python-part-1-the-strategy-pattern-54b24897233e
You can add this as a @classmethod on the Cereal model:
class Cereal(models.Model):
    name = models.CharField()

    @classmethod  # was "#classmethod" — markdown-mangled decorator
    def some_func_cereal(cls, name):
        """Rename row #1 of whichever concrete subclass invokes this."""
        obj = cls.objects.get(id=1)  # local renamed from misleading "bowled"
        obj.name = name
        obj.save()

    class Meta:
        abstract = True
You can then call this method with:
# cls is bound to whichever subclass the call goes through:
Boxed.some_func_cereal('Captain Crunch')
Bowled.some_func_cereal('Lucky Charms')
The class with which you call the class method is passed as the cls parameter to the some_func_cereal function.
You could add an update method to your Cereal abstract class such as:
class Cereal:
    """Abstract base adding a bulk field-update helper."""

    def update(self, **kwargs):
        """Assign each keyword as an attribute, persist exactly those
        columns via save(update_fields=...), and return self for chaining."""
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
        self.save(update_fields=kwargs.keys())
        return self
and use it as follows to update any column you like
BoxedInstance.update(name="new name")
or
BoxedInstance.update(name="new name", taste="cardboardy")
from flask_sqlalchemy import SQLAlchemy
import datetime

# Single shared SQLAlchemy instance; models below bind to it via db.Model.
db = SQLAlchemy()
class BaseModel(db.Model):
    """Base data model for all objects"""
    __abstract__ = True

    def __init__(self, *args):
        # BUG (the subject of this question): one-argument super() expects a
        # *type*, not an instance, so super(self) raises
        # "TypeError: super() argument 1 must be type, not Station".
        super(self).__init__(*args)

    def __repr__(self):
        """Define a base way to print models"""
        # NOTE(review): relies on a _to_dict() helper not shown in this
        # snippet — presumably defined on db.Model or a mixin; confirm.
        return '%s(%s)' % (self.__class__.__name__, {
            column: value
            for column, value in self._to_dict().items()
        })

    def json(self):
        """Define a base way to jsonify models, dealing with datetime objects"""
        # Dates are rendered as 'YYYY-MM-DD'; everything else passes through.
        return {
            column: value if not isinstance(value, datetime.date) else value.strftime('%Y-%m-%d')
            for column, value in self._to_dict().items()
        }
class Station(BaseModel, db.Model):
    """Model for the stations table"""
    __tablename__ = 'stations'

    # Surrogate primary key plus a latitude/longitude coordinate pair.
    id = db.Column(db.Integer, primary_key = True)
    lat = db.Column(db.Float)
    lng = db.Column(db.Float)
TypeError: super() argument 1 must be type, not Station
I know super() with no arguments is only valid in Python 3, but in my case what should I fill in the super() call? Also, is it okay to put the super() call in the __init__ of the parent class (BaseModel)?
Since you're not doing anything in BaseModel.__init__, the correct approach is to not implement it at all. Without __init__ defined in BaseModel, you'll go to the super class's __init__ automatically, and more efficiently.
That said, if you are doing something meaningful in BaseModel.__init__, you can't use one-argument super like that. One-argument super is of very limited use (basically, only for classmethods, where you're passing the type but not an instance of the type as the argument). You need two-argument super, explicitly providing the name of the current class you're trying to bypass looking for superclasses, then self, e.g:
class BaseModel(db.Model):
    """Base data model for all objects"""
    __abstract__ = True

    def __init__(self, *args):
        # Two-argument form: name the class to start the MRO search after,
        # plus the instance — works on both Python 2 and Python 3.
        super(BaseModel, self).__init__(*args)
I have a generic set of attributes (e.g. of type string and integer) and I would like to use the following django models to store them:
class Attribute(models.Model):
    """Abstract base for typed attribute rows attached to an Item."""

    item = models.ForeignKey('Item')

    class Meta:
        abstract = True

    @staticmethod  # was "#staticmethod" — markdown-mangled decorator
    def get_subclass_by_type(type):
        """Map a type name to the concrete attribute model (None if unknown)."""
        TEXT_TYPES = ["text", "string"]
        INTEGER_TYPES = ["integer", "number"]
        if type in TEXT_TYPES:
            return TextAttribute
        if type in INTEGER_TYPES:
            return IntegerAttribute

class TextAttribute(Attribute):
    value = models.CharField(max_length=128)

class IntegerAttribute(Attribute):
    value = models.IntegerField()
Is there any clean & simple way I've missed to define the types in the subclasses directly? Something like:
class Attribute(models.Model):
    item = models.ForeignKey('Item')

    class Meta:
        abstract = True

    @staticmethod  # was "#staticmethod" — markdown-mangled decorator
    def get_subclass_by_type(type):
        ...  # <do something> — asker's placeholder: pick subclass via TYPES

class TextAttribute(Attribute):
    TYPES = ["text", "string"]  # type names handled by this subclass
    value = models.CharField(max_length=128)

class IntegerAttribute(Attribute):
    TYPES = ["integer", "number"]
    value = models.IntegerField()
You can try to define your method get_subclass_by_type like the following
@classmethod  # was "#classmethod" — markdown-mangled decorator
def get_subclass_by_type(cls, type):
    """Return the first direct subclass whose TYPES list contains ``type``.

    Returns None when no subclass declares the given type name.
    """
    for subcls in cls.__subclasses__():
        # was "subcls.TYPE" — the question's subclasses declare TYPES
        if type in subcls.TYPES:
            return subcls
Haven't tested this code, but I think it should work.