I get the following error when I run a query against a table defined with dataclasses using the fully declarative method (https://docs.sqlalchemy.org/en/14/orm/declarative_styles.html#example-two-dataclasses-with-declarative-table):
sqlalchemy.exc.DBAPIError: (sqlalchemy.dialects.postgresql.asyncpg.Error) <class 'asyncpg.exceptions.DataError'>: invalid input for query argument $1: Field(name='ticker',type=<class 'str'>,d... (expected str, got Field)
I am using the column_property as follows:
average_price: float = field(metadata={'sa': column_property(
    select(func.avg(Transaction.price))
    .where(Transaction.user_id == user_id)
    .correlate_except(Transaction)
    .scalar_subquery()
)})
Full code including table definitions:
import uuid, enum, datetime
from dataclasses import dataclass, field

from sqlalchemy import Column, DateTime, Float, String, Integer, ForeignKey, Numeric, Enum, Date, select, func, exists
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import registry, column_property

Base = registry()


@Base.mapped
@dataclass
class User:
    __tablename__ = "user_model"
    __sa_dataclass_metadata_key__ = "sa"

    id: uuid.UUID = field(
        init=False,
        default_factory=uuid.uuid4,
        metadata={"sa": Column(UUID(as_uuid=True), primary_key=True)},
    )
    email: str = field(
        metadata={"sa": Column(String(254), nullable=False, unique=True, index=True)}
    )
    user_name: str = field(
        metadata={"sa": Column(String(254), nullable=True, unique=True, index=True)}
    )
    hashed_password: str = field(metadata={"sa": Column(String(128), nullable=False)})


@Base.mapped
@dataclass
class Transaction:
    __tablename__ = "transactions_model"
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={'sa': Column(Integer, primary_key=True)})
    user_id: uuid.UUID = field(
        metadata={"sa": Column(ForeignKey("user_model.id", ondelete="CASCADE"))}
    )
    item: str = field(metadata={'sa': Column(String(50), nullable=False)})
    number: int = field(metadata={'sa': Column(Integer, nullable=False)})
    price: Numeric(10, 2) = field(metadata={'sa': Column(Numeric(10, 2), nullable=False)})
    transact_time: DateTime = field(metadata={'sa': Column(DateTime, nullable=False)})


@Base.mapped
@dataclass
class UserItems:
    __tablename__ = "owner_model"
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={'sa': Column(Integer, primary_key=True)})
    user_id: uuid.UUID = field(
        metadata={"sa": Column(ForeignKey("user_model.id", ondelete="CASCADE"))}
    )
    item: str = field(metadata={'sa': Column(String(50), nullable=False)})
    number_owned: int = field(metadata={'sa': Column(Integer, nullable=False)})
    average_price: float = field(metadata={'sa': column_property(
        select(func.avg(Transaction.price))
        .where(Transaction.user_id == user_id)
        .correlate_except(Transaction)
        .scalar_subquery()
    )})
Querying the table returns the error shown above. I query it like this:

items = await session.execute(
    select(UserItems)
    .where(UserItems.user_id == current_user.id)
)
return items.scalars().all()
I have a suspicion that SQLAlchemy is having trouble with the column being passed in as the dataclass field, but I have no idea how else to express the same thing.
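A standalone illustration of that suspicion (not the original models): inside the class body, a name bound with field(...) is still the raw dataclasses.Field object at the moment the column_property() expression is built, which would match the "(expected str, got Field)" part of the error:

from dataclasses import field

class Demo:
    # Stand-in for the mapped column; @dataclass has not processed the class yet
    # at the point where the class body (and therefore the subquery) is evaluated.
    user_id = field(default=None)
    captured_type = type(user_id)

print(Demo.captured_type)  # <class 'dataclasses.Field'>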
Thanks in advance.
Related
When I try to get an organization by id (inn), the devices_list field is null even though the device exists:
{
inn: 8481406044,
organization_name: "slava bebrow",
devices_list: null
}
models.py:
class Organization(Base):
    __tablename__ = "organizations_table"

    inn = Column(BigInteger, primary_key=True, index=False)
    organization_name = Column(String, nullable=False, unique=True)
    devices = relationship("Device", backref="organizations_table")


class Device(Base):
    __tablename__ = "devices_table"

    uuid = Column(String, primary_key=True, index=False)
    device_name = Column(String, nullable=False, unique=True)
    organization_id = Column(BigInteger, ForeignKey("organizations_table.inn"), nullable=True)
pydantic_models.py:
class OrganizationBase(BaseModel):
    inn: int
    organization_name: str

    class Config:
        orm_mode = True


class Organization(OrganizationBase):
    devices_list: list['DeviceBase'] = None


class DeviceBase(BaseModel):
    uuid: str
    device_name: str
    organization_id: int | None

    class Config:
        orm_mode = True
Function to get the organization:
def get_organization(db: Session, organization_id: int):
    db_organization = db.query(models.Organization).filter(models.Organization.inn == organization_id).first()
    if db_organization is None:
        raise HTTPException(status_code=404, detail="Organization not found")
    return db_organization
When I try print(db_organization.devices[0].uuid), I can see that devices really is a list of objects.
#app.get("/organizations/{organization_id}", response_model=pydantic_models.Organization)
def get_organization(organization_id, db: Session = Depends(get_db)):
return al.get_organization(db=db, organization_id=organization_id)
I think the problem is in the Pydantic model, but I don't know how to fix it.
I expect a list of devices in the field, not null.
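For what it's worth, with orm_mode Pydantic v1 fills each schema field from the ORM attribute of the same name, and the relationship on the model is called devices while the schema field is called devices_list. A sketch with the names aligned (this assumes the mismatch is the cause; it is not confirmed by the code above):

from pydantic import BaseModel

class DeviceBase(BaseModel):
    uuid: str
    device_name: str
    organization_id: int | None

    class Config:
        orm_mode = True

class OrganizationBase(BaseModel):
    inn: int
    organization_name: str

    class Config:
        orm_mode = True

class Organization(OrganizationBase):
    # Named after the SQLAlchemy relationship attribute ("devices"),
    # so from_orm can read it straight off the mapped object.
    devices: list[DeviceBase] = []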
I am trying to figure out how to use type hints properly with SQLAlchemy.
I have two models:
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Person(Base):
    __table__ = "persons"

    id: int = Column(Integer, primary_key=True)
    name: str = Column(String, nullable=False)
    age: str | None = Column(String)
    pets: list[Animal] = relationship(
        "Animal",
        back_populates="persons",
        lazy="selectin",
    )


class Animal(Base):
    __table__ = "animals"

    id: int = Column(Integer, primary_key=True)
    weight: float | None = Column(Float)
    max_age: int | None = Column(Integer)
    owner: Person = relationship(
        "Person",
        back_populates="animals",
        lazy="selectin",
    )
Now I want to mark both of them as dataclasses so that I get an __init__ method with type hints. My question is whether that is a good idea, because both Base and dataclass do some "magic" underneath, and I want to know if this could cause any issues.
Also, the default values would then be the Column, relationship, etc. objects.
Maybe there are other (simpler) options to achieve this?
There is no Base when registering dataclasses; it uses the mapper_registry.mapped decorator instead (docs).
Here are your classes as declaratively mapped dataclasses; I have also corrected the relationships, which did not work as you wrote them:
from __future__ import annotations

from dataclasses import dataclass, field

from sqlalchemy import Column, Float, ForeignKey, Integer, String
from sqlalchemy.orm import registry, relationship

mapper_registry = registry()


@mapper_registry.mapped
@dataclass
class Person:
    __tablename__ = "persons"  # NOTE: __tablename__, not __table__
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    name: str = field(init=False, metadata={"sa": Column(String, nullable=False)})
    age: str | None = field(init=False, metadata={"sa": Column(String)})
    pets: list[Animal] = field(
        default_factory=list,
        metadata={
            "sa": relationship(
                "Animal",
                back_populates="owner",
                lazy="selectin",
            )
        },
    )


@mapper_registry.mapped
@dataclass
class Animal:
    __tablename__ = "animals"
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    weight: float | None = field(init=False, metadata={"sa": Column(Float)})
    max_age: int | None = field(init=False, metadata={"sa": Column(Integer)})
    owner_id: int = field(init=False, metadata={"sa": Column(ForeignKey("persons.id"))})
    owner: Person = field(
        init=False,
        metadata={
            "sa": relationship(
                "Person",
                back_populates="pets",
                lazy="selectin",
            )
        },
    )
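As a quick check of the generated __init__ (a sketch assuming the mapping above; only fields not marked init=False become parameters):

pet = Animal()                 # every column field is init=False, so no arguments
person = Person(pets=[pet])    # pets is the only parameter the generated __init__ accepts
print(len(person.pets))        # 1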
I'm looking for a way to use SQLAlchemy's on_conflict_do_update feature on tables with a one-to-many relationship. I have looked at multiple examples, but all of them show a single-table implementation; I am looking for an example that performs on_conflict_do_update across multiple related tables with auto-generated ids.
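For reference, the single-table form that those examples cover looks roughly like this (a sketch against the User mapping and session from the code below; it does not handle the related Address rows, which is the part I am stuck on):

from sqlalchemy.dialects.postgresql import insert

stmt = insert(User).values(id=1, name="Matthew", nickname="Matt")
stmt = stmt.on_conflict_do_update(
    index_elements=[User.id],             # conflict target (the primary key)
    set_={
        "name": stmt.excluded.name,       # values to apply when the row already exists
        "nickname": stmt.excluded.nickname,
    },
)
session.execute(stmt)
session.commit()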
Here is my code:
from __future__ import annotations

from contextlib import contextmanager
from dataclasses import dataclass, field, asdict, is_dataclass
from functools import lru_cache
from timeit import default_timer
from typing import List

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import registry, relationship, scoped_session, sessionmaker

engine = create_engine(
    "postgresql+psycopg2://postgres:postgres@localhost:5432/postgres", echo=True
)

mapper_registry = registry()


@mapper_registry.mapped
@dataclass
class User:
    __tablename__ = "user"
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    name: str = field(default=None, metadata={"sa": Column(String(50))})
    nickname: str = field(default=None, metadata={"sa": Column(String(12))})
    addresses: List[Address] = field(
        default_factory=list, metadata={"sa": relationship("Address")}
    )


@mapper_registry.mapped
@dataclass
class Address:
    __tablename__ = "address"
    __sa_dataclass_metadata_key__ = "sa"

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    user_id: int = field(init=False, metadata={"sa": Column(ForeignKey("user.id"))})
    email_address: str = field(default=None, metadata={"sa": Column(String(50))})


@contextmanager
def get_session(cleanup=False):
    session = scoped_session(
        sessionmaker(
            autocommit=False, autoflush=False, expire_on_commit=False, bind=engine
        )
    )
    mapper_registry.metadata.create_all(engine)

    try:
        yield session
    except Exception:
        session.rollback()
    finally:
        session.close()

    if cleanup:
        mapper_registry.metadata.drop_all(engine)


def main():
    with get_session(cleanup=False) as session:
        data = [
            User(
                name="Matthew",
                nickname="Matt",
                addresses=[Address(email_address="matt123@abc.com")],
            ),
            User(
                name="Micheal",
                nickname="Mike",
                addresses=[Address(email_address="mike123@abc.com")],
            ),
        ]
        session.add_all(data)
        session.commit()


if __name__ == "__main__":
    START_TIME = default_timer()
    main()
    elapsed = default_timer() - START_TIME
    time_completed_at = "{:5.2f}s".format(elapsed)
    print("Total Time to finish the job: {0}".format(time_completed_at))
Any help is appreciated. Thanks.
I want to map backer_id, which is a primary key in my model, to all the information related to that user (as defined in the UserInProject schema) using Pydantic.
Pydantic file:
class UserInProject(BaseModel):
    email: EmailStr
    full_name: str
    id: int

    class Config:
        orm_mode = True


class TransactionBase(BaseModel):
    quantity: int
    amount: float
    currency: Currency


class TransactionIn(TransactionBase):
    project_id: int


class TransactionOut(BaseModel):
    id: int
    date_ordered: datetime
    backer: "UserInProject"
My model:
class BackerProjectOrder(Base):
    __tablename__ = "backers_projects_orders"

    id = Column(
        Integer, primary_key=True, index=True, autoincrement=True, nullable=False
    )
    backer_id = Column(ForeignKey("users.id"), index=True, primary_key=True)
    ...
My API:
@router.post(
    "/", status_code=status.HTTP_201_CREATED, response_model=schema.TransactionOut
)
def create_transaction(
    transaction: schema.TransactionIn,
    db: Session = Depends(get_db),
    current_user: models.User = Depends(get_current_user),
):
    new_transaction = models.BackerProjectOrder(**transaction_dict, backer_id=current_user.id)
    db.add(new_transaction)
    db.commit()
    db.refresh(new_transaction)
And it currently gives me this error:
pydantic.error_wrappers.ValidationError: 1 validation error for TransactionOut
response -> backer
field required (type=value_error.missing)
How can I ask Pydantic to map the backer_id field to the UserInProject schema? Is it possible at all?
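One pattern I have seen (an assumption here, not something confirmed for this code) is to expose the related user on the model as a relationship whose attribute name matches the schema field, so that orm_mode can populate backer from it:

# Sketch only: assumes a User model mapped to the "users" table exists,
# and that Base is the project's declarative base.
from sqlalchemy import Column, Integer, ForeignKey
from sqlalchemy.orm import relationship

class BackerProjectOrder(Base):
    __tablename__ = "backers_projects_orders"

    id = Column(
        Integer, primary_key=True, index=True, autoincrement=True, nullable=False
    )
    backer_id = Column(ForeignKey("users.id"), index=True, primary_key=True)

    # Hypothetical relationship: gives each order a `backer` attribute that
    # Pydantic's orm_mode can read into TransactionOut.backer after db.refresh().
    backer = relationship("User")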
I have some Pydantic and SQLAlchemy models:
game_users = Table('game_users', Base.metadata,
    Column('game_id', ForeignKey('games.id'), primary_key=True),
    Column('user_id', ForeignKey('users.id'), primary_key=True)
)


class Game(Base):
    __tablename__ = 'games'

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    users = relationship("User", secondary="game_users", back_populates='games')


class User(Base):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    age = Column(Integer)
    email = Column(String, nullable=False, unique=True)
    games = relationship("Game", secondary="game_users", back_populates='users')


class UserBase(BaseModel):
    id: int
    name: str
    age: int = Query(ge=0, le=100)
    email: str

    class Config:
        orm_mode = True


class GameBase(BaseModel):
    id: int
    name: str

    class Config:
        orm_mode = True


class UsersOut(UserBase):
    games: List[GameBase]


class GamesOut(GameBase):
    users: List[UserBase]
I need to add entries via a POST method, but I don't know exactly how to do it. I tried something like this:
@app.post('/connect/{uid}/{gid}')
def connect_to_game(uid: int, gid: int, db: Session = Depends(get_db)):
    game = db.query(Game).filter(Game.id == gid).first()
    user = db.query(User).filter(User.id == uid).first()
    user_games = user(games=[game.id])
    game_users = game(users=[user.id])
    db.add_all([user_games, game_users])
    db.commit()
    return f'{game.name} successfully connected to {user.name}'
But it certainly doesn't work.
I tried to find information in the Pydantic and FastAPI documentation but couldn't, so I'd really appreciate any help or ideas.
I define the models as follows:
game_users = Table('game_users', DBBase.metadata,
    Column('game_id', ForeignKey('games.id'), primary_key=True),
    Column('user_id', ForeignKey('users.id'), primary_key=True)
)


class Game(DBBase):
    __tablename__ = 'games'

    id = Column(Integer, primary_key=True)
    name = Column(String(10), nullable=False)
    users = relationship("User", secondary=game_users, backref='games')


class User(DBBase):
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(String(10), nullable=False)
This simplifies the two back_populates declarations into a single backref.
Then associate the objects:
>>> g1 = Game(name='g1')
>>> g2 = Game(name='g2')
>>> u1 = User(name='u1')
>>> u2 = User(name='u2')
>>> g1.users.extend([u1,u2])
>>> g2.users.extend([u1,u2])
>>> g1.users
[<models.User object at 0x105166b50>, <models.User object at 0x10519cfa0>]
>>> u1.games
[<models.Game object at 0x10512a280>, <models.Game object at 0x105166130>]
>>> s.add_all([g1,g2,u1,u2]) # s is db session
>>> s.commit()
So you just need to add the relationship the same way in your POST method:
@app.post('/connect/{uid}/{gid}')
def connect_to_game(uid: int, gid: int, db: Session = Depends(get_db)):
    game = db.query(Game).filter(Game.id == gid).first()
    user = db.query(User).filter(User.id == uid).first()
    game.users.append(user)
    # no new instance, so no need for `add_all`
    # db.add_all([user_games, game_users])
    db.commit()
    return f'{game.name} successfully connected to {user.name}'
Reference: SQLAlchemy many-to-many relationships.