What is the best way to convert a SQLAlchemy model to a Pydantic schema (model) if it includes an enum field?
SQLAlchemy
import enum

from sqlalchemy import Enum, Column, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class StateEnum(enum.Enum):
    CREATED = 'CREATED'
    UPDATED = 'UPDATED'

class Adapter(Base):
    __tablename__ = 'adapters'

    id = Column(String, primary_key=True)
    friendly_name = Column(String(256), nullable=False)
    state: StateEnum = Column(Enum(StateEnum))
Pydantic
from pydantic import BaseModel
from enum import Enum

class StateEnumDTO(str, Enum):
    CREATED = 'CREATED'
    UPDATED = 'UPDATED'

class AdapterDTO(BaseModel):
    friendly_name: str
    state: StateEnumDTO  # This currently cannot be converted?

    class Config:
        allow_population_by_field_name = True
        orm_mode = True
        use_enum_values = True
Conversion
AdapterDTO.from_orm(Adapter(friendly_name='test', state=StateEnum.CREATED))
This leads to the error
value is not a valid enumeration member; permitted: 'CREATED', 'UPDATED' (type=type_error.enum; enum_values=[<StateEnumDTO.CREATED: 'CREATED'>, <StateEnumDTO.UPDATED: 'UPDATED'>])
How can I configure either
a) the serialization with the from_orm method, or
b) the creation of the state field?
c) How can I convert it the other way around?
Is there a native way to do this with Pydantic, or how is this typically done?
Update:
Test case
def test_enum_conversion_to_dto():
    adapter = Adapter(id='1', friendly_name='test', state=StateEnum.CREATED)
    adapter_dto = AdapterDTO.from_orm(adapter)
    assert adapter_dto.state == StateEnumDTO.CREATED
    assert adapter_dto.state.value == StateEnum.CREATED.value
Pydantic requires that both enum classes share the same base type. In your case, StateEnum inherits only from enum.Enum, but StateEnumDTO inherits from both str and enum.Enum.
You can fix this issue by changing your SQLAlchemy enum definition:
class StateEnum(str, enum.Enum):
    CREATED = 'CREATED'
    UPDATED = 'UPDATED'
You must add arbitrary_types_allowed = True to the model's Config class:
from pydantic import BaseModel
from enum import Enum

class StateEnumDTO(str, Enum):
    CREATED = 'CREATED'
    UPDATED = 'UPDATED'

class AdapterDTO(BaseModel):
    friendly_name: str
    state: StateEnumDTO  # This can now be converted

    class Config:
        allow_population_by_field_name = True
        orm_mode = True
        use_enum_values = True
        arbitrary_types_allowed = True
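With both enums sharing the str base, the conversion from the test case above should go through. A minimal check, assuming the definitions above (note that use_enum_values populates state with the plain value, which still compares equal to the str-based StateEnumDTO member):

adapter = Adapter(id='1', friendly_name='test', state=StateEnum.CREATED)
adapter_dto = AdapterDTO.from_orm(adapter)
assert adapter_dto.state == StateEnumDTO.CREATED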
Related
So I am trying to use an existing Pydantic model in another Pydantic model, as I've seen in the examples.
These are my Pydantic models:
class DriverCategoryOut(BaseModel):
    internal_id: int
    category: str

    class Config:
        orm_mode = True

class DocListOut(BaseModel):
    driver_categories: DriverCategoryOut

    class Config:
        orm_mode = True
This is my route code:
@router.get('/document', response_model=shemas.DocListOut)
def get_doc_list(db: Session = Depends(get_db)):
    driver_categories = db.query(DriverCategory).first()
    return driver_categories
I'm getting the error:
pydantic.error_wrappers.ValidationError: 1 validation error for DocListOut
response -> driver_categories
field required (type=value_error.missing)
If I change to response_model=shemas.DriverCategoryOut it works just fine. What is wrong with my DocListOut model?
The line driver_categories = db.query(DriverCategory).first() retrieves (and parses a record as) a DriverCategory. Even assuming that maps perfectly to DriverCategoryOut, why would it automatically be parsed into an object that has a property of type DriverCategoryOut?
You might want to try the following:
@router.get('/document', response_model=shemas.DocListOut)
def get_doc_list(db: Session = Depends(get_db)):
    driver_categories = db.query(DriverCategory).first()
    return {'driver_categories': driver_categories}
Now the returned dict will be validated against your response_model (in this case, DocListOut).
There is another way: inherit from the existing model:
class DocListOut(DriverCategoryOut):
    class Config:
        orm_mode = True
        # if you want to exclude some fields
        fields = {
            "internal_id": {'exclude': True},
        }
If your output is a list:
from typing import List

class DocListOut(BaseModel):
    driver_category: List[DriverCategoryOut] = []

    class Config:
        orm_mode = True
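A route returning this list-shaped model then needs to hand back all matching rows under that key. A sketch, assuming the same query setup as in the question:

@router.get('/document', response_model=shemas.DocListOut)
def get_doc_list(db: Session = Depends(get_db)):
    # .all() returns a list of DriverCategory rows, matching the List[...] field
    driver_categories = db.query(DriverCategory).all()
    return {'driver_category': driver_categories}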
I am trying to change the alias_generator and the allow_population_by_field_name properties of the Config class of a Pydantic model at runtime. I am expecting it to cascade from the parent model to the child models.
class ParentModel(BaseModel):
    class Config:
        alias_generator = to_camel
        allow_population_by_field_name = True

class ChildModel(ParentModel):
    first_name: str

class ChildModel2(ParentModel):
    data: ChildModel
What I want to do is change the following at runtime:
alias_generator = to_pascal
How would I be able to do that?
I tried something like this:
class ParentModel(BaseModel):
    def __init__(self, is_camel=True, **data):
        self.Config.alias_generator = to_camel if is_camel else to_pascal

    class Config:
        allow_population_by_field_name = True

class ChildModel(ParentModel):
    first_name: str

class ChildModel2(ParentModel):
    def __init__(self, is_camel, **data):
        super().__init__(is_camel, **data)

    data: ChildModel

test = ChildModel2(is_camel=False, data=ChildModel(first_name="test"))
So the goal is to convert the Pydantic models to JSON in either camel case, pascal case, or snake case when FastAPI returns the response.
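One possible workaround sketch (not the original poster's code; it assumes Pydantic v1, where alias_generator is applied at class creation and cannot easily be swapped on an instance) is to export with field names and re-key the resulting dict with whichever casing function is needed per response:

def to_pascal_case(s: str) -> str:
    # hypothetical helper: snake_case -> PascalCase
    return ''.join(part.title() for part in s.split('_'))

def rekey(obj, key_fn):
    # recursively rename dict keys using key_fn
    if isinstance(obj, dict):
        return {key_fn(k): rekey(v, key_fn) for k, v in obj.items()}
    if isinstance(obj, list):
        return [rekey(v, key_fn) for v in obj]
    return obj

payload = rekey(ChildModel(first_name="test").dict(), to_pascal_case)
# payload == {'FirstName': 'test'}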
I am trying to write tests for my FastAPI application, but I get some import errors.
I'm trying to do very simple testing for my models, e.g.:
models/example.py:
class ExampleDbModel(ExampleBase, table=True):
    __tablename__ = "example"

    id: str
    name: str
    relation_id: str = Field(foreign_key="another_example.id")
    ...
    relation: AnotherExampleDbModel = Relationship()

class AnotherExampleDbModel(AnotherExampleBase, table=True):
    __tablename__ = "another_example"

    id: str
    some_field: str
    relation_id: str = Field(foreign_key="third_example.id")
    ...
    relation: ThirdExampleDbModel = Relationship()
tests/test_example.py:
def test_example():
    example = ExampleDbModel(name="test")
    fields = [
        "id",
        "name",
        ...
    ]
    class_fields = example.dict().keys()
    diff = set(fields) ^ set(list(class_fields))
    assert not diff
This gives me an error: sqlalchemy.exc.InvalidRequestError: Table 'third_example' is already defined for this MetaData instance. Specify 'extend_existing=True' to redefine options and columns on an existing Table object. Am I right in assuming it's because the model AnotherExampleDbModel has its own foreign-key relation to another table? How could I test a model that has relations to another table (which in turn has relations to another table)?
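The error message itself points at one possible workaround: marking the table as re-definable so repeated imports during test collection don't clash. A sketch of that, applied to the pattern above (the ThirdExampleBase name follows the naming pattern of the other models, and the flag placement is an assumption, not something from the original post):

class ThirdExampleDbModel(ThirdExampleBase, table=True):
    __tablename__ = "third_example"
    # allow SQLAlchemy to redefine this table if the module is imported again
    __table_args__ = {"extend_existing": True}

    id: str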
Is it possible to pass function setters for immutable Pydantic models?
For example:
from uuid import uuid4, UUID
from pydantic import BaseModel
from datetime import datetime

def generate_uuid():
    return uuid4()

def get_datetimenow():
    return datetime.now()

class Item(BaseModel):
    class Config:
        allow_mutation = False
        extra = "forbid"

    id: UUID
    created_at: datetime
I want the functions generate_uuid and get_datetimenow to set the attributes.
>>> Item()
ValidationError: 2 validation errors for Item
id
  field required (type=value_error.missing)
created_at
  field required (type=value_error.missing)
Instead, I want to get an Item object with id and created_at set automatically, identical to the result of running:
>>> Item(id=generate_uuid(), created_at=get_datetimenow())
Item(id=UUID('8f898730-3fad-4ca9-9667-c090f62a2954'), created_at=datetime.datetime(2021, 1, 19, 21, 13, 7, 58051))
You can use the default_factory parameter of Field with an arbitrary function, like so:
from uuid import uuid4, UUID
from pydantic import BaseModel, Field
from datetime import datetime

class Item(BaseModel):
    class Config:
        allow_mutation = False
        extra = "forbid"

    id: UUID = Field(default_factory=uuid4)
    created_at: datetime = Field(default_factory=datetime.now)
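With the factories in place, calling Item() with no arguments fills both fields, matching the result the question asks for. A quick check, assuming the definitions above:

item = Item()
print(item.id)          # e.g. UUID('8f898730-3fad-4ca9-9667-c090f62a2954')
print(item.created_at)  # e.g. datetime.datetime(2021, 1, 19, 21, 13, 7, 58051)
# allow_mutation = False still keeps the instance immutable;
# item.id = uuid4() would raise an error.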
In my REST application I want to return JSON in the JSON:API format, but I need to create a Schema class for it and define every field again that is already in my model. So instead of defining every field in the schema class, can I not take it from the DB model?
Below is my model class:
class Author(db.Model):
    id = db.Column(db.Integer)
    name = db.Column(db.String(255))
I am defining the schema like below:
class AuthorSchema(Schema):
    id = fields.Str(dump_only=True)
    name = fields.Str()
    metadata = fields.Meta()

    class Meta:
        type_ = 'people'
        strict = True
So here, I have defined id and name twice. Is there any option in marshmallow-jsonapi to assign the model to the schema class so it can take all fields from the model?
Note: I am using marshmallow-jsonapi for this. I have tried marshmallow-sqlalchemy; it has that option, but it does not return JSON in the JSON:API format.
You can use flask-marshmallow's ModelSchema and marshmallow-sqlalchemy in combination with marshmallow-jsonapi with the caveat that you have to subclass not only the Schema classes but also the SchemaOpts classes, like this:
# ...
from flask_marshmallow import Marshmallow
from marshmallow_jsonapi import Schema, SchemaOpts
from marshmallow_sqlalchemy import ModelSchemaOpts
# ...
ma = Marshmallow(app)
# ...

class JSONAPIModelSchemaOpts(ModelSchemaOpts, SchemaOpts):
    pass

class AuthorSchema(ma.ModelSchema, Schema):
    OPTIONS_CLASS = JSONAPIModelSchemaOpts

    class Meta:
        type_ = 'people'
        strict = True
        model = Author

# ...
foo = AuthorSchema()
bar = foo.dump(query_results).data  # This will be in JSONAPI format including every field in the model
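For a single Author record, the dumped document should look roughly like the following (the record values are illustrative, and the exact key casing depends on any inflection configured in Meta):

author = Author(id=1, name='Jane Doe')  # illustrative record
result = AuthorSchema().dump(author).data
# result is shaped like a JSON:API document:
# {"data": {"type": "people", "id": "1", "attributes": {"name": "Jane Doe"}}}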