My design idea is that 'db objects' implement some methods; after querying and checking the relevant values, I want to pass such an instance to threading.Thread() and run some code on it in a different thread. I wanted to use scoped_session for this.
The problem is that the instance is bound to a Session (and its SQLite connection), so it throws this exception:
sqlite3.ProgrammingError: SQLite objects created in a thread can only
be used in that same thread. The object was created in thread id
140081404675584 and this is thread id 140081133512448.
db_alchemy/__init__.py:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
Base = declarative_base()
e = create_engine("sqlite:///datafiles/data.sqlite")
Base.metadata.create_all(e)
Session = scoped_session(sessionmaker(bind=e))
db_alchemy/tables.py:
import threading
from sqlalchemy import Column, Integer, String, ForeignKey, DateTime, Time
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm import relationship, backref
from db_alchemy import Base, Session
class Request(Base):
__tablename__ = 'request'
id = Column(Integer, primary_key=True)
scan_title = Column(String)
status = Column(String)
servers = association_proxy('request_servers','server')
def print_servers(self):
print(threading.current_thread())
print(self.servers)
class Server(Base):
__tablename__ = 'server'
ip = Column(String, primary_key=True, autoincrement=False)
class Request2Server(Base):
__tablename__ = 'request2server'
request_id_fk = Column(Integer, ForeignKey('request.id'), primary_key=True)
server_ip_fk = Column(String, ForeignKey('server.ip'), primary_key=True)
request_rel = relationship('Request', backref=backref('request_servers',lazy='dynamic'))
server = relationship('Server', backref=backref('server_requests',lazy='dynamic'))
def __init__(self, server=None):
self.server = server
runner.py:
import threading
from threading import Thread
import db_alchemy.tables as t
from db_alchemy import Session, Base, e
if __name__ == '__main__':
Base.metadata.drop_all(e)
Base.metadata.create_all(e)
ses = Session()
r1 = t.Request(scan_title='R1', status='running')
r2 = t.Request(scan_title='R2', status='finished')
s1 = t.Server(ip='IP1')
s2 = t.Server(ip='IP2')
r1.servers.append(s1)
r1.servers.append(s2)
r2.servers.append(s2)
ses.add_all([r1, r2])
ses.commit()
for request in ses.query(t.Request).filter(t.Request.status == 'running').all():
print(threading.current_thread())
r1.print_servers()
Thread(target=r1.print_servers).start()
Is it possible to avoid this error with this design?
Is there some different design I should implement?
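For reference, the usual way around this restriction is to hand the worker thread plain data (for example the primary key) and let the thread open its own session from the scoped_session registry, so each thread talks to its own connection. A minimal sketch of that shape, reusing the models above (the worker function and its name are my own, not part of the original code):

def print_servers_in_thread(request_id):
    # Each thread gets its own thread-local session from the scoped_session,
    # re-loads the row by primary key, and removes the session when done.
    session = Session()
    try:
        request = session.query(t.Request).get(request_id)
        print(threading.current_thread())
        print(list(request.servers))
    finally:
        Session.remove()

Thread(target=print_servers_in_thread, args=(r1.id,)).start()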
Let's say I have two tables, users and devices, with a one-to-many relation between them.
In SQL, I can solve the problem of selecting each user's latest device with the following query:
SELECT
users.*, devices.*
FROM
users
LEFT JOIN ( SELECT d1.*
FROM devices as d1
LEFT JOIN devices AS d2
ON d1.user_id = d2.user_id AND d1.date < d2.date
WHERE d2.user_id IS NULL ) as device_temp
ON (users.id = device_temp.user_id)
Here is my Python code:
#user_model.py
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
first_name = Column(String(500), nullable=False)
last_name = Column(String(250), nullable=False)
device_model.py
#device_model.py
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, relation
from sqlalchemy import create_engine
from user_model import User
Base = declarative_base()
class DeviceModel(Base):
__tablename__ = 'device'
id = Column(Integer, primary_key=True)
created_at = Column(DateTime(), nullable=False)
device_id = Column(String(250), nullable=False)
user_uid = Column(String, ForeignKey((User.id)))
owner = relation(User, backref='user_device')
run.py
#run.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from user_model import User, Base
from sleep_session import SleepSession, Base
from device_model import DeviceModel, Base
engine = create_engine(connection_string)
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
query = session.query(User,DeviceModel).join(DeviceModel)
results = query.all()
for row in results:
print(row.User.first_name +" "+ row.DeviceModel.device_id + " "+ str(row.DeviceModel.created_at))
I know this type of question has been asked multiple times, but I could not find one using the SQLAlchemy ORM. I want the same result as described here.
Thanks.
I used this question to practice SQLAlchemy, as I'm new to it.
The closest answer I can get is the following. If you want to see the full workable code in a single file, go into the edits; here I'll leave out the boilerplate.
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime
from sqlalchemy.orm import relationship, relation
from sqlalchemy.orm import sessionmaker
connection_string = 'postgresql://postgres:password@localhost/test'
Base = declarative_base()
class User(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
first_name = Column(String(500), nullable=False)
last_name = Column(String(250), nullable=False)
class DeviceModel(Base):
__tablename__ = 'device'
id = Column(Integer, primary_key=True)
created_at = Column(DateTime(), nullable=False)
device_id = Column(String(250), nullable=False)
    user_uid = Column(Integer, ForeignKey(User.id))  # changed from String; Postgres otherwise raises: Key columns "user_uid" and "id" are of incompatible types: character varying and integer.
owner = relation(User, backref='user_device')
engine = create_engine(connection_string)
Base.metadata.bind = engine
#User.__table__.create(engine)
#DeviceModel.__table__.create(engine)
DBSession = sessionmaker(bind=engine)
session = DBSession()
My Answer:
from sqlalchemy import and_, or_
from sqlalchemy.orm import aliased
DeviceModel2 = aliased(DeviceModel)
subquery = (session
.query(DeviceModel.created_at)
.outerjoin(DeviceModel2,
and_(DeviceModel.user_uid == DeviceModel2.user_uid,
DeviceModel.created_at < DeviceModel2.created_at))
.filter(DeviceModel2.user_uid == None)
.subquery('subq'))
query = (session
.query(User, DeviceModel)
.outerjoin(DeviceModel)
.filter(or_(
DeviceModel.created_at == subquery.c.created_at,
DeviceModel.id == None)))
print(query)
results = query.all()
for row in results:
if row[1]:
print({**row.User.__dict__, **row.DeviceModel.__dict__})
else:
print(row.User.__dict__)
A variant of the same query, this time using the model modules from the question:
from db_config import connection_string
from sqlalchemy import create_engine , and_ , inspect
from sqlalchemy.orm import sessionmaker, aliased
from user_model import User, Base
from device_model import DeviceModel, Base
engine = create_engine(connection_string)
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
DeviceModel_aliased = aliased(DeviceModel)
#make sub-query
query_for_latest_device = session.query(DeviceModel).\
outerjoin(DeviceModel_aliased,
and_(DeviceModel_aliased.user_uid == DeviceModel.user_uid,
DeviceModel_aliased.created_at > DeviceModel.created_at)).\
filter(DeviceModel_aliased.id == None).\
subquery()
join_user_and_device = session.query(User.first_name,
                                     query_for_latest_device.c.device_id,
                                     query_for_latest_device.c.created_at).\
    join(query_for_latest_device,
         query_for_latest_device.c.user_uid == User.id).\
    all()
for row in join_user_and_device:
print(row._asdict())
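For comparison, the same greatest-n-per-group result can also be written with a correlated scalar subquery on max(created_at). This is only a sketch, assuming SQLAlchemy 1.4+ where Query.scalar_subquery() is available, and it reuses the User and DeviceModel models from above:

from sqlalchemy import and_, func

# For each user, pick the device whose created_at equals that user's maximum.
latest_created_at = (session.query(func.max(DeviceModel.created_at))
                     .filter(DeviceModel.user_uid == User.id)
                     .correlate(User)
                     .scalar_subquery())

query = (session.query(User, DeviceModel)
         .outerjoin(DeviceModel,
                    and_(DeviceModel.user_uid == User.id,
                         DeviceModel.created_at == latest_created_at)))
for user, device in query.all():
    print(user.first_name, device.device_id if device else None)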
How can I add a function/expression that takes other columns as arguments as the default value of a column in a SQLAlchemy table? For example, I want to define c as a column whose value is 2*x (x being another column), and the value should be saved in the database (it could be in another table too). Can the @hybrid_property decorator be used in this context?
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, aliased
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///Helloworld.db', echo=False)
Session = sessionmaker(bind=engine)
metadata = MetaData(engine)
Base = declarative_base()
class HelloWorld(Base):
__tablename__ = 'helloworld'
pm_key = Column(Integer, primary_key=True)
x = Column(Integer, nullable=False)
c = Column(Integer,default=2*x)
Base.metadata.create_all(engine)
It is possible. Below I'm just adding a piece of code you can try. For more, I think this will help you.
def mydefault(context):
    # the column key is lowercase 'x'; compute 2 * x at insert time
    return 2 * context.current_parameters.get('x')
class HelloWorld(Base):
__tablename__ = 'helloworld'
pm_key = Column(Integer, primary_key=True)
x = Column(Integer, nullable=False)
c = Column(Integer,default=mydefault)
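On the @hybrid_property part of the question: a hybrid property computes the value on access (and can also be used in query expressions) instead of persisting it, so it only fits if c does not actually need to be stored in the database. A minimal sketch of that alternative (the class name HelloWorld2 is mine, just to avoid clashing with the class above):

from sqlalchemy.ext.hybrid import hybrid_property

class HelloWorld2(Base):
    __tablename__ = 'helloworld2'
    pm_key = Column(Integer, primary_key=True)
    x = Column(Integer, nullable=False)

    @hybrid_property
    def c(self):
        # computed on the Python side for a loaded instance
        return 2 * self.x

    @c.expression
    def c(cls):
        # and usable in SQL, e.g. session.query(HelloWorld2).filter(HelloWorld2.c > 4)
        return 2 * cls.x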
I'm having a problem with SQLAlchemy that is stopping me from making any headway. I've created other tables with relationships, but this one doesn't seem to work because of some faulty tables I made previously. The code below is the test, and when I run it I get the error pasted below the code:
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class X(Base):
__tablename__ = 'x2'
pid = Column(Integer, primary_key=True)
color = Column(String(4), nullable=False)
player = relationship('Y')
class Y(Base):
__tablename__ = 'y2'
name = Column(String(16), nullable=False)
id = Column(Integer, primary_key=True)
champion = Column(String(16), nullable=False)
kills = Column(Integer, nullable=False)
deaths = Column(Integer, nullable=False)
team_id = Column(Integer, ForeignKey('x2.pid'))
engine = create_engine('postgresql://db')
Base.metadata.create_all(engine)
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
#from lol_db_setup import Base, Match, Team, Player
engine = create_engine('postgresql://heyfinn:lolpassword@leagueoflegendsdb.c3jpkci5dhiy.ap-southeast-2.rds.amazonaws.com:5432/leagueoflegendsdb')
Base.metadata.bind = engine
DBSession = sessionmaker(bind = engine)
session = DBSession()
teamEntry = X(pid=1234, color = 'red')
newEntry = Y(name='heyfinn', id=622740, champion = 'Zac', kills=10, deaths = 0, team_id= 1234)
session.commit()
---------------------------------------------------------------------------
InvalidRequestError Traceback (most recent call last)
<ipython-input-33-56c613d76617> in <module>()
8 DBSession = sessionmaker(bind = engine)
9 session = DBSession()
---> 10 teamEntry = X(id=1234, color = 'red')
11 newEntry = Y(name='heyfinn', id=622740, champion = 'Zac', kills=10, deaths = 0, team_id= 1234)
12 session.commit()
<string> in __init__(self, **kwargs)
InvalidRequestError: One or more mappers failed to initialize - can't proceed with initialization of other mappers. Triggering mapper:
'Mapper|Match|match'. Original exception was: Could not determine join condition between parent/child tables on relationship Match.team - there are no foreign keys linking these tables.
Ensure that referencing columns are associated with a ForeignKey or ForeignKeyConstraint, or specify a 'primaryjoin' expression.
So I gather the problem must be with a table 'match' and its foreign key that I'd created earlier. How can I get rid of this remnant of the 'match' table so I can just build these other tables?
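For what it's worth, the error usually means an old, broken Match mapper is still registered in the running interpreter (SQLAlchemy configures all pending mappers together, so one broken mapper blocks the others), rather than the database table itself being in the way. Restarting the interpreter/kernel clears it; if the Match model is meant to stay, it needs a real foreign key for the Match.team relationship. Match and Team are not shown in the question, so the sketch below only illustrates the usual shape of that fix, with assumed names and columns:

# Hypothetical models, only to show the missing ForeignKey the error points at.
class Team(Base):
    __tablename__ = 'team'
    id = Column(Integer, primary_key=True)
    name = Column(String(32), nullable=False)

class Match(Base):
    __tablename__ = 'match'
    id = Column(Integer, primary_key=True)
    team_id = Column(Integer, ForeignKey('team.id'))  # the missing link
    team = relationship('Team')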
I'm trying to figure out why I need to use a no_autoflush block when inserting data into an association proxy if the association proxy data has been accessed first. An example is below (using MySQL):
from sqlalchemy import create_engine, Integer, Column, String, ForeignKey, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship, scoped_session
Base = declarative_base()
engine = create_engine('{}://{}:{}@{}/{}'.format(...))
session_factory = sessionmaker(bind=engine)
Session = scoped_session(session_factory)
class DomainModel(Base):
__tablename__ = 'domains'
id = Column(Integer, primary_key=True)
name = Column(String(255), nullable=False, unique=True)
domains_to_servers = relationship("DomainServerModel", back_populates="domain")
servers = association_proxy('domains_to_servers', 'server',
creator=lambda s: DomainServerModel(server=s))
class ServerModel(Base):
__tablename__ = 'servers'
id = Column(Integer, primary_key=True)
name = Column(String(128), nullable=False, unique=True, index=True)
domains_to_servers = relationship("DomainServerModel", back_populates="server")
domains = association_proxy('domains_to_servers', 'domain',
creator=lambda d: DomainServerModel(domain=d))
class DomainServerModel(Base):
__tablename__ = 'domains_to_servers'
id = Column(Integer, primary_key=True)
domain_id = Column(Integer, ForeignKey('domains.id'), nullable=False)
server_id = Column(Integer, ForeignKey('servers.id'), nullable=False)
server = relationship('ServerModel', back_populates="domains_to_servers")
domain = relationship('DomainModel', back_populates="domains_to_servers")
def test():
session = Session()
with session.no_autoflush:
s = session.query(ServerModel).filter_by(name='test.com').one()
print(s.domains)
d = DomainModel(name='test1.com')
session.add(d)
session.commit()
s.domains.append(d)
session.commit()
if __name__ == '__main__':
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
session = Session()
session.add(ServerModel(name='test.com'))
session.commit()
test()
I'm trying to add a new domain_to_server mapping via the server/domain association proxy. If I don't access the association proxy first, i.e. remove the print statement in test(), then I can add the domain without needing the session.no_autoflush block. But with the print statement there, it fails without the session.no_autoflush block with an IntegrityError saying that server_id cannot be null in the domains_to_servers table.
I'm trying to figure out why the no_autoflush block is needed here. I don't see any mention of it in the association_proxy docs. Is this simply the way it is, and should all inserts into an association_proxy happen in a no_autoflush block in case it has been accessed prior to the insert?
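I can't speak to the exact flush ordering, but here is a sketch of a workaround that sidesteps the question entirely: build the DomainServerModel association row explicitly, with both sides set, so there is never a half-populated row for autoflush to trip over (the helper function and its name are mine, for illustration only):

def add_domain_to_server(server_name, domain_name):
    # Both foreign keys are known before anything can be flushed, so no
    # no_autoflush block is required even if the proxy was read earlier.
    session = Session()
    s = session.query(ServerModel).filter_by(name=server_name).one()
    print(s.domains)                       # reading the proxy first is fine here
    d = DomainModel(name=domain_name)
    session.add(DomainServerModel(server=s, domain=d))
    session.commit()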
Is it possible to instantiate a table from its table name?
I've looked in the SQLAlchemy documentation and I couldn't find anything.
class A():
__tablename__ = 'x'
newTable = Table('x')
Is something like this possible?
(This is pseudo-code, not real Python.)
Thanks,
Create_a.py
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class A(Base):
__tablename__ = 'X'
A_id = Column(Integer, primary_key=True)
A_name = Column(String(250), nullable=False)
engine = create_engine('sqlite:///sqlalchemy_example.db')
Base.metadata.create_all(engine)
Insert_a.py
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from Create_a import A, Base, engine
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
print(A.__tablename__)
A.__tablename__ = A
new_A = A.__tablename__(A_name='new A')
session.add(new_A)
session.commit()
Where A.__tablename__ is X.
Given some model Foo:
class Foo(Base):
__tablename__ = 'foos'
...
the associated Table object can be accessed directly via the __table__ attribute (the declarative base sets it up as soon as the class is defined; no call to Base.metadata.create_all is needed for this):
tbl = Foo.__table__
If only the value of Foo.__tablename__ is available, the table can be retrieved using reflection:
import sqlalchemy as sa
tbl = sa.Table('foos', sa.MetaData(), autoload_with=engine)
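If the goal is to get back the mapped class itself (so it can be instantiated like new_A in the question), one option is to walk the declarative registry. A minimal sketch, assuming SQLAlchemy 1.4+ where Base.registry is available (class_for_tablename is my own helper name):

def class_for_tablename(base, tablename):
    # Scan every mapper registered on this declarative base and return the
    # class whose __tablename__ matches, or None if nothing matches.
    for mapper in base.registry.mappers:
        if getattr(mapper.class_, '__tablename__', None) == tablename:
            return mapper.class_
    return None

cls = class_for_tablename(Base, 'X')
if cls is not None:
    new_A = cls(A_name='new A')
    session.add(new_A)
    session.commit()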