Why am I getting "InvalidRequestError"? - python

I'm trying to implement some foreign keys between tables and query using joins as per this ORM tutorial:
from sqlalchemy import (Column, ForeignKey, Integer, create_engine)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship

Base = declarative_base()

class DataAccessLayer():
    def __init__(self):
        conn_string = "mysql+mysqlconnector://root:root@localhost/"
        self.engine = create_engine(conn_string)

    def create_session(self):
        Base.metadata.create_all(self.engine)
        Session = sessionmaker()
        Session.configure(bind=self.engine)
        self.session = Session()

class Bet(Base):
    __tablename__ = "bet"
    __table_args__ = ({"schema": "belgarath", "extend_existing": True})
    id_ = Column(Integer, primary_key=True)
    match_id = Column(Integer, ForeignKey("belgarath.match_.id_"))
    match_ = relationship("Match", back_populates="belgarath.bet")

class Match(Base):
    __tablename__ = "match_"
    __table_args__ = ({"schema": "belgarath", "extend_existing": True})
    id_ = Column(Integer, primary_key=True)
    tournament_id = Column(Integer)

dal = DataAccessLayer()
dal.create_session()
bets = dal.session.query(Bet)
bets.join(Match)
for bet in bets:
    print(bet.id_, bet.tournament_id)
However, I'm getting the following error on the bets = dal.session.query(Bet) line:
Exception has occurred: InvalidRequestError
Mapper 'mapped class Match->match_' has no property 'belgarath.bet'
Where am I going wrong? Do I need some kind of reciprocal relationship in Match?
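For reference, a minimal sketch of what a paired back_populates configuration typically looks like. back_populates names the mapped attribute on the other class, not a table name, so the bets attribute on Match below is a hypothetical name rather than something from the original post:

# Sketch only: each back_populates points at the attribute name on the other class.
class Bet(Base):
    __tablename__ = "bet"
    __table_args__ = {"schema": "belgarath", "extend_existing": True}
    id_ = Column(Integer, primary_key=True)
    match_id = Column(Integer, ForeignKey("belgarath.match_.id_"))
    match_ = relationship("Match", back_populates="bets")

class Match(Base):
    __tablename__ = "match_"
    __table_args__ = {"schema": "belgarath", "extend_existing": True}
    id_ = Column(Integer, primary_key=True)
    tournament_id = Column(Integer)
    bets = relationship("Bet", back_populates="match_")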

Related

Generic database functions for several tables with SQLAlchemy

I have a base table and several tables for professors, students, ... and I use single table inheritance from SQLAlchemy for this.
from sqlalchemy import create_engine
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

base = declarative_base()

class TableBase(base):
    __tablename__ = 'base_table'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    email = Column(String)

class Student(TableBase):
    __tablename__ = 'student_table'
    id = Column(Integer, ForeignKey(TableBase.id), primary_key=True)
    __mapper_args__ = {
        'polymorphic_identity': 'student_table'
    }

class Professor(TableBase):
    __tablename__ = 'professor_table'
    id = Column(Integer, ForeignKey(TableBase.id), primary_key=True)
    __mapper_args__ = {
        'polymorphic_identity': 'professor_table'
    }

class Database:
    def __init__(self):
        db_string = "postgresql+psycopg2://user:secret@localhost:5432/<database>"
        engine = create_engine(db_string)
        Session = sessionmaker(engine)
        self.session = Session()
        base.metadata.create_all(engine)

    def add_entry(self, data, table):
        table_entry = self.create_table_entry(data, table)
        self.session.add(table_entry)
        self.session.commit()

    def add_entries(self, data_list, table):
        entries = []
        for data in data_list:
            table_entry = self.create_table_entry(data, table.__class__.__name__)
            entries.append(table_entry)
        self.session.add_all(entries)
        self.session.commit()

    def create_table_entry(self, data, table):
        table_entry = table(
            id=data["id"],
            subject=data["subject"],
            email=data["email"]
        )
        return table_entry

def main():
    student_table = Student()
    professor_table = Professor()
    db = Database()
    data = {"id": 42, "name": "John", "email": ""}
    db.create_table_entry(data, student_table)
    db.create_table_entry(data, professor_table)

if __name__ == "__main__":
    main()
But I don't know how I could write the create_table_entry function so that it works with different object classes.
What is the best-practice way to do this?
def create_table_entry(data, table):
    return table(**data)

Then, do you even need a separate function for this?
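A minimal usage sketch, assuming the Student, Professor and Database classes above; table(**data) works for any mapped class whose column names match the dict keys (the e-mail values here are made up):

# Hypothetical usage with the models defined in the question.
db = Database()
student = Student(**{"id": 1, "name": "John", "email": "john@example.org"})
professor = Professor(**{"id": 2, "name": "Jane", "email": "jane@example.org"})
db.session.add_all([student, professor])
db.session.commit()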
You can use the SQLAlchemy insert construct like this:

from sqlalchemy import insert

def add_row(self, data, table):
    self.session.execute(insert(table), data)
    self.session.commit()
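A small follow-up sketch (an assumption, not part of the original answer): with SQLAlchemy 2.0-style ORM bulk inserts, execute() also accepts a list of parameter dicts, which issues a single executemany-style INSERT:

# Hypothetical bulk usage, assuming a configured session and the Student model above
# (ORM classes require SQLAlchemy 2.0; Core Table objects work this way in 1.4 too).
rows = [
    {"id": 1, "name": "John", "email": "john@example.org"},
    {"id": 2, "name": "Jane", "email": "jane@example.org"},
]
session.execute(insert(Student), rows)
session.commit()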

SQLAlchemy Polymorphism: Parent table does not exist when creating a child record

When trying to commit a record, the following exception arises:
OperationalError: (sqlite3.OperationalError) no such table: basedocument
[SQL: INSERT INTO basedocument (common_field, doc_type) VALUES (?, ?)]
[parameters: ('humanidade', 'user')]
basedocument is the parent table in a polymorphic association, and user is the child table.
Shouldn't the parent table be created when I create a record on a child table?
Here's the code:
from sqlalchemy import create_engine, ForeignKey, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite:///temp.db")
sessao = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()

class BaseDocument(Base):
    __tablename__ = "basedocument"
    id = Column(Integer, primary_key=True)
    common_field = Column(String)
    doc_type = Column(String(20))
    __mapper_args__ = {
        "polymorphic_identity": "basedocument",
        "polymorphic_on": doc_type,
    }

class User(BaseDocument):
    __tablename__ = "user"
    id = Column(Integer, ForeignKey("basedocument.id"), primary_key=True)
    name = Column(String)
    fullname = Column(String)
    nickname = Column(String)
    __mapper_args__ = {
        "polymorphic_identity": "user",
    }

u1 = User(
    name="Dumont",
    fullname="Santos Dumont",
    nickname="voador",
    common_field="humanidade",
)

sessao.add(u1)
sessao.commit()
print("\nObject: ", u1)
Well, I just noticed that I forgot to call Base.metadata.create_all(engine) after the class definitions. This is what actually creates the tables.
This answer is valid for many questions around the "No such table" error.
The right code:
from sqlalchemy import create_engine, ForeignKey, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine("sqlite:///temp.db")
sessao = scoped_session(sessionmaker(bind=engine))
Base = declarative_base()

class BaseDocument(Base):
    __tablename__ = "basedocument"
    id = Column(Integer, primary_key=True)
    common_field = Column(String)
    doc_type = Column(String(20))
    __mapper_args__ = {
        "polymorphic_identity": "basedocument",
        "polymorphic_on": doc_type,
    }

class User(BaseDocument):
    __tablename__ = "user"
    id = Column(Integer, ForeignKey("basedocument.id"), primary_key=True)
    name = Column(String)
    fullname = Column(String)
    nickname = Column(String)
    __mapper_args__ = {
        "polymorphic_identity": "user",
    }

Base.metadata.create_all(engine)

u1 = User(
    name="Dumont",
    fullname="Santos Dumont",
    nickname="voador",
    common_field="humanidade",
)

sessao.add(u1)
sessao.commit()
print("\nObject: ", u1)

Extract table names from an SQLAlchemy exists query

How do I extract the table name from an SQLAlchemy exists statement?
Assume we have the following code:
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)

engine = create_engine('mysql://...')
Session = sessionmaker(bind=engine)
conn = engine.connect()
session = Session(bind=conn)

query_exists = session.query(Person).exists()
How can I extract the table name from query_exists?
from sqlalchemy import Table
from sqlalchemy.sql.dml import UpdateBase, ValuesBase
from sqlalchemy.sql.visitors import ClauseVisitor

def extract_tables(sql_stmt):
    tables = []
    visitor = ClauseVisitor()
    clause_iter = visitor.iterate(sql_stmt)
    for e in clause_iter:
        if isinstance(e, Table):
            tables.append(e)
        if isinstance(e, (ValuesBase, UpdateBase)):
            tables.append(e.table)
    return set(tables)
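A hypothetical usage with the query above; traversing the Exists clause should reach the underlying person table:

# query_exists is an Exists clause element, so the visitor should find the Table it wraps.
for table in extract_tables(query_exists):
    print(table.name)  # expected output: person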

SQLAlchemy association proxy and no_autoflush

I'm trying to figure out why I need to use a no_autoflush block when inserting data into an association proxy if the association proxy data has been accessed first. An example of this is below (using MySQL):
from sqlalchemy import create_engine, Integer, Column, String, ForeignKey, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, scoped_session

Base = declarative_base()
engine = create_engine('{}://{}:{}@{}/{}'.format(...))
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)

class DomainModel(Base):
    __tablename__ = 'domains'
    id = Column(Integer, primary_key=True)
    name = Column(String(255), nullable=False, unique=True)
    domains_to_servers = relationship("DomainServerModel", back_populates="domain")
    servers = association_proxy('domains_to_servers', 'server',
                                creator=lambda s: DomainServerModel(server=s))

class ServerModel(Base):
    __tablename__ = 'servers'
    id = Column(Integer, primary_key=True)
    name = Column(String(128), nullable=False, unique=True, index=True)
    domains_to_servers = relationship("DomainServerModel", back_populates="server")
    domains = association_proxy('domains_to_servers', 'domain',
                                creator=lambda d: DomainServerModel(domain=d))

class DomainServerModel(Base):
    __tablename__ = 'domains_to_servers'
    id = Column(Integer, primary_key=True)
    domain_id = Column(Integer, ForeignKey('domains.id'), nullable=False)
    server_id = Column(Integer, ForeignKey('servers.id'), nullable=False)
    server = relationship('ServerModel', back_populates="domains_to_servers")
    domain = relationship('DomainModel', back_populates="domains_to_servers")

def test():
    session = Session()
    with session.no_autoflush:
        s = session.query(ServerModel).filter_by(name='test.com').one()
        print(s.domains)
        d = DomainModel(name='test1.com')
        session.add(d)
        session.commit()
        s.domains.append(d)
        session.commit()

if __name__ == '__main__':
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)
    session = Session()
    session.add(ServerModel(name='test.com'))
    session.commit()
    test()
I'm trying to add a new domain-to-server mapping via the server/domain association proxy. If I don't access the association proxy first, i.e. remove the print statement in test(), then I can add the domain without needing the session.no_autoflush block. But with the print statement in there, it fails without the session.no_autoflush block with an IntegrityError, saying that server_id cannot be null in the domains_to_servers table.
I'm trying to figure out why the no_autoflush block is needed here. I don't see any mention of it in the association_proxy docs. Is this simply the way it is, and should all inserts into an association_proxy happen in a no_autoflush block in case it has been accessed prior to the insert?
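For what it's worth, a minimal sketch of a workaround (an assumption, not part of the original post): constructing the DomainServerModel association object explicitly, with both sides set up front, so that an autoflush never sees a half-populated row:

# Hypothetical alternative to the proxy append, assuming the models above:
# with both relationships populated at construction time, a flush never
# encounters a DomainServerModel whose server_id is still NULL.
def link_domain_to_server(session, server, domain):
    session.add(DomainServerModel(server=server, domain=domain))
    session.commit()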

Use JOIN in proxy association

I have a proxy association between Content and ContentRevision. Using sqlalchemy.ext.associationproxy.association_proxy produces an EXISTS condition:
from sqlalchemy import create_engine, Column, Integer, Text, ForeignKey, inspect, String, and_
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, foreign, remote

DeclarativeBase = declarative_base()

class ContentRevision(DeclarativeBase):
    __tablename__ = 'content_revision'
    revision_id = Column(Integer, primary_key=True)
    content_id = Column(Integer, ForeignKey('content.id'))
    description = Column(Text())
    title = Column(String(32))

class Content(DeclarativeBase):
    __tablename__ = 'content'
    id = Column(Integer, primary_key=True)
    revisions = relationship("ContentRevision",
                             foreign_keys=[ContentRevision.content_id])
    revision = relationship(
        "ContentRevision",
        uselist=False,
        primaryjoin=lambda: and_(
            remote(Content.id) == foreign(ContentRevision.content_id),
            ContentRevision.revision_id == session.query(ContentRevision.revision_id)
            .filter(ContentRevision.content_id == Content.id)
            .order_by(ContentRevision.revision_id.desc())
            .limit(1)
            .correlate(Content)
        ),
    )
    title = association_proxy('revision', 'title')
    description = association_proxy('revision', 'description')

# Prepare database and session
engine = create_engine('sqlite://', echo=False)
DeclarativeBase.metadata.create_all(engine)
session_maker = sessionmaker(engine)
session = session_maker()

c1 = Content()
c1.revisions.append(ContentRevision(title='rev', description='rev1'))
session.add(c1)
session.flush()
c1.revisions.append(ContentRevision(title='rev', description='rev2'))
assert [('rev', 'rev1'), ('rev', 'rev2')] == session.query(ContentRevision.title, ContentRevision.description).all()
print(str(session.query(Content).filter(Content.title == 'foo')))
"""
SELECT content.id AS content_id
FROM content
WHERE EXISTS (SELECT 1
FROM content_revision
WHERE content.id = content_revision.content_id AND content_revision.revision_id = (SELECT content_revision.revision_id AS content_revision_revision_id
FROM content_revision
WHERE content_revision.content_id = content.id ORDER BY content_revision.revision_id DESC
LIMIT :param_1) AND content_revision.title = :title_1)
"""
How can I make a query on the associated column Content.title that uses the join declared in the primaryjoin of the Content.revision relationship?
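A minimal sketch of one common approach (an assumption, not from the original post): join along the Content.revision relationship explicitly and filter on the related column, which renders a JOIN instead of the EXISTS produced by the association proxy:

# Hypothetical query, assuming the models above; Query.join(Content.revision)
# uses the relationship's primaryjoin as the ON clause, so the filter targets
# content_revision.title through a JOIN rather than EXISTS.
query = (
    session.query(Content)
    .join(Content.revision)
    .filter(ContentRevision.title == 'foo')
)
print(str(query))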
