sqlalchemy orm using select where with parameters - python

I am trying something really simple, but I cannot find the proper way to do it in any of the SQLAlchemy ORM tutorials I have found. I want to do the equivalent of the following from Adonisjs:
Database.query('SELECT * FROM tbl WHERE user = ? AND age = ?', ['Tester', 18])
How do I do parameters in the below sqlalchemy python code? What am I doing wrong?
from sqlalchemy import create_engine, select
from sqlalchemy.orm import Session

engine = create_engine("postgresql+psycopg2://test:test@localhost:5432/test", echo=False, future=True)
session = Session(engine)

sql = select(User).where(User.first_name == 'Tester').where(User.age == 18)
user = session.execute(sql)
So instead of User.first_name == 'Tester', I'd like it to be a binding placeholder. Same goes for User.age == 18. Then in session.execute(sql) I'd like to supply the bindings. Is there a way to do this, or am I approaching this the wrong way? I want to use the ORM, so the syntax above. I'm trying to learn the newest SQLAlchemy with the ORM instead of Core.

As far as I know, bind parameters like the ones in your qmark-style query are only available in text-based queries, such as a TextClause.
ORM entities and textual queries are compatible via Select.from_statement:
import sqlalchemy as sa
from sqlalchemy import orm

Base = orm.declarative_base()

class User(Base):
    __tablename__ = "user"
    id = sa.Column(sa.Integer, primary_key=True)
    first_name = sa.Column(sa.String)
    age = sa.Column(sa.Integer)

    def __repr__(self):
        return f"User(first_name={self.first_name}, age={self.age})"

engine = sa.create_engine("sqlite:///:memory:", echo=True, future=True)
Base.metadata.create_all(engine)

u1 = User(first_name="Alice", age=21)
u2 = User(first_name="Bob", age=20)

session = orm.Session(engine)
session.add_all([u1, u2])
session.flush()

stmt = sa.select(User).from_statement(
    sa.text("SELECT * FROM user WHERE first_name = :fn AND age = :age")
)
session.execute(stmt, {"fn": "Alice", "age": 21}).scalars().one()

stmt = sa.select(User).where(User.first_name == "Alice", User.age == 21)
session.execute(stmt).scalars().one()

# or with variables
fn = "Alice"
age = 21
stmt = sa.select(User).where(User.first_name == fn, User.age == age)
session.execute(stmt).scalars().one()
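If you want the ORM select() itself to carry explicit placeholders, closest in spirit to the qmark query, bindparam() can stand in for the literal values and the bindings are then supplied to execute(). A minimal sketch, reusing the User model above:

# sa.bindparam() creates a named placeholder; the value is passed at execution time
stmt = sa.select(User).where(
    User.first_name == sa.bindparam("fn"),
    User.age == sa.bindparam("age"),
)
session.execute(stmt, {"fn": "Alice", "age": 21}).scalars().one()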

Related

Using sqlalchemy with psycopg

I am in need of combining the results of a SQLAlchemy query and a psycopg query.
Currently I use psycopg to do most of my SQL selects in my code. This is done using a cursor and fetchall().
However, I have a separate microservice that returns some extra WHERE clauses I need for my statement, based on some variables. This is returned as a SQLAlchemy SELECT object. This is out of my control.
Example return:
select * from users where name = 'bar';
My current solution for this is to hardcode the results of the microservice (just the WHERE clauses) into an enum and add them into the psycopg statement using an f-string. This is a temporary solution.
Simplified example:
user_name = "bar"

sql_enum = {
    "foo": "name = 'foo'",
    "bar": "name = 'bar'",
}

with conn.cursor() as cur:
    cur.execute(f"select * from users where location = 'FOOBAR' and {sql_enum[user_name]}")
I am looking for a way to better join these two statements. Any suggestions are greatly appreciated!
Rather than mess with dynamic SQL (f-strings, etc.), I would just start with a SQLAlchemy Core select() statement and then add the whereclause from the statement returned by the microservice:
import sqlalchemy as sa

engine = sa.create_engine("postgresql://scott:tiger@192.168.0.199/test")

users = sa.Table(
    "users", sa.MetaData(),
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String(50)),
    sa.Column("location", sa.String(50)),
)
users.drop(engine, checkfirst=True)
users.create(engine)

# mock return from microservice
from_ms = sa.select(sa.text("*")).select_from(users).where(users.c.name == "bar")

base_query = sa.select(users).where(users.c.location == "FOOBAR")
full_query = base_query.where(from_ms.whereclause)

engine.echo = True
with engine.begin() as conn:
    result = conn.execute(full_query)
"""SQL emitted:
SELECT users.id, users.name, users.location
FROM users
WHERE users.location = %(location_1)s AND users.name = %(name_1)s
[generated in 0.00077s] {'location_1': 'FOOBAR', 'name_1': 'bar'}
"""

SQLAlchemy - pass a dynamic tablename to query function?

I have a simple polling script that polls entries based on new IDs in an MSSQL table. I'm using SQLAlchemy's ORM to create a table class and then query that table. I want to be able to add more tables "dynamically" without coding it directly into the method.
My polling function:
def poll_db():
    query = db.query(
        Transactions.ID).order_by(Transactions.ID.desc()).limit(1)

    # Continually poll for new images to classify
    max_id_query = query
    last_max_id = max_id_query.scalar()
    while True:
        max_id = max_id_query.scalar()
        if max_id > last_max_id:
            print(
                f"New row(s) found. "
                f"Processing ids {last_max_id + 1} through {max_id}"
            )

            # Insert ML model
            id_query = db.query(Transactions).filter(
                Transactions.ID > last_max_id)
            df_from_query = pd.read_sql_query(
                id_query.statement, db.bind, index_col='ID')
            print(f"New query was made")

            last_max_id = max_id
        time.sleep(5)
My table model:
import sqlalchemy as db
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import defer, relationship, query

from database import SessionLocal, engine

insp = db.inspect(engine)
db_list = insp.get_schema_names()

Base = declarative_base(cls=BaseModel)

class Transactions(Base):
    __tablename__ = 'simulation_data'

    sender_account = db.Column('sender_account', db.BigInteger)
    recipient_account = db.Column('recipient_account', db.String)
    sender_name = db.Column('sender_name', db.String)
    recipient_name = db.Column('recipient_name', db.String)
    date = db.Column('date', db.DateTime)
    text = db.Column('text', db.String)
    amount = db.Column('amount', db.Float)
    currency = db.Column('currency', db.String)
    transaction_type = db.Column('transaction_type', db.String)
    fraud = db.Column('fraud', db.BigInteger)
    swift_bic = db.Column('swift_bic', db.String)
    recipient_country = db.Column('recipient_country', db.String)
    internal_external = db.Column('internal_external', db.String)
    ID = Column('ID', db.BigInteger, primary_key=True)
QUESTION
How can I pass the table class name "dynamically", along the lines of poll_db(tablename) where tablename='Transactions', instead of writing similar queries for multiple tables such as:
query = db.query(Transactions.ID).order_by(Transactions.ID.desc()).limit(1)
query2 = db.query(Transactions2.ID).order_by(Transactions2.ID.desc()).limit(1)
query3 = db.query(Transactions3.ID).order_by(Transactions3.ID.desc()).limit(1)
The tables will have identical structure, but different data.
I can't give you a full example right now (will edit later), but here's one hacky way to do it (the documentation will probably be a better place to check):
def dynamic_table(tablename):
    for class_name, cls in Base._decl_class_registry.items():
        # the registry also holds non-mapped entries, hence the getattr
        if getattr(cls, "__tablename__", None) == tablename:
            return cls

Transactions2 = dynamic_table("simulation_data")
assert Transactions2 is Transactions
The returned class is the model you want. Keep in mind that Base can only see the classes that have already been defined, so if they live in other modules you need to import them first so they are registered as Base's subclasses.
For selecting columns, something like this should work:
def dynamic_table_with_columns(tablename, *columns):
    cls = dynamic_table(tablename)
    subset = []
    for col_name in columns:
        column = getattr(cls, col_name, None)
        if column is not None:
            subset.append(column)
    # in case no columns were given
    if not subset:
        return db.query(cls)
    return db.query(*subset)
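With that in place, the polling function from the question can look the model up by table name instead of hard-coding Transactions. A sketch based on the question's own poll_db (db, pd and time as in the question):

def poll_db(tablename):
    model = dynamic_table(tablename)
    query = db.query(model.ID).order_by(model.ID.desc()).limit(1)
    last_max_id = query.scalar()
    while True:
        max_id = query.scalar()
        if max_id > last_max_id:
            id_query = db.query(model).filter(model.ID > last_max_id)
            df_from_query = pd.read_sql_query(
                id_query.statement, db.bind, index_col='ID')
            last_max_id = max_id
        time.sleep(5)

poll_db("simulation_data")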

Converting a query with built-in MySQL functions to flask-sqlalchemy

I have a MySQL query like this:
UPDATE mytable SET is_active=false
WHERE created < DATE_SUB(NOW(), INTERVAL `interval` second)
How can I express it using flask-sqlalchemy's ORM (i.e. via the MyTable model)?
This may not be the most elegant solution, but it seems to be working for me:
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

# engine is assumed to be an existing MySQL engine created elsewhere with sa.create_engine(...)

Base = declarative_base()

class Account(Base):
    __tablename__ = "so62234199"
    id = sa.Column(sa.Integer, primary_key=True)
    created = sa.Column(sa.DateTime)
    interval = sa.Column(sa.Integer)
    is_active = sa.Column(sa.Boolean)

    def __repr__(self):
        return f"<Account(id={self.id}, created='{self.created}')>"

Session = sessionmaker(bind=engine)
session = Session()

account_table = list(Account.metadata.tables.values())[0]
upd = (
    account_table.update()
    .values(is_active=False)
    .where(
        Account.created
        < sa.func.date_sub(
            sa.func.now(),
            sa.text(
                " ".join(
                    ["INTERVAL", str(Account.interval.compile()), "SECOND"]
                )
            ),
        )
    )
)
with engine.connect() as conn:
    conn.execute(upd)
The SQL statement generated is
INFO sqlalchemy.engine.Engine UPDATE so62234199 SET is_active=%s WHERE so62234199.created < date_sub(now(), INTERVAL so62234199.interval SECOND)
INFO sqlalchemy.engine.Engine (0,)
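Since the question asks to go through the model, the same statement can also be written with an ORM-enabled update() in 1.4+ style. A sketch, reusing the Account model and Session above; the INTERVAL fragment still has to be rendered as text because it references the row's own interval column rather than a bindable literal:

upd = (
    sa.update(Account)
    .values(is_active=False)
    .where(
        Account.created
        < sa.func.date_sub(
            sa.func.now(),
            sa.text(f"INTERVAL {Account.interval.compile()} SECOND"),
        )
    )
)
with Session() as session:
    # skip in-Python session synchronization; the criteria can't be evaluated locally
    session.execute(upd, execution_options={"synchronize_session": False})
    session.commit()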

Using window functions to LIMIT a query with SqlAlchemy on Postgres

I'm trying to write the following sql query with sqlalchemy ORM:
SELECT * FROM
(SELECT *, row_number() OVER(w)
FROM (select distinct on (grandma_id, author_id) * from contents) as c
WINDOW w AS (PARTITION BY grandma_id ORDER BY RANDOM())) AS v1
WHERE row_number <= 4;
This is what I've done so far:
s = Session()
unique_users_contents = (s.query(Content).distinct(Content.grandma_id,
Content.author_id)
.subquery())
windowed_contents = (s.query(Content,
func.row_number()
.over(partition_by=Content.grandma_id,
order_by=func.random()))
.select_from(unique_users_contents)).subquery()
contents = (s.query(Content).select_from(windowed_contents)
.filter(row_number >= 4)) ## how can I reference the row_number() value?
result = contents
for content in result:
print "%s\t%s\t%s" % (content.id, content.grandma_id,
content.author_id)
As you can see it's pretty much modeled, but I have no idea how to reference the row_number() result of the subquery from the outer query's WHERE clause. I tried something like windowed_contents.c.row_number and adding a label() call on the window function, but it's not working, and I couldn't find any similar example in the official docs or on Stack Overflow.
How can this be accomplished? And also, could you suggest a better way to do this query?
windowed_contents.c.row_number against a label() is how you'd do it, and it works for me (note the select_entity_from() method is new in SQLA 0.8.2 and will be needed here in 0.9 vs. select_from()):
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Content(Base):
    __tablename__ = 'contents'
    grandma_id = Column(Integer, primary_key=True)
    author_id = Column(Integer, primary_key=True)

s = Session()

unique_users_contents = s.query(Content).distinct(
    Content.grandma_id, Content.author_id).\
    subquery('c')

q = s.query(
    Content,
    func.row_number().over(
        partition_by=Content.grandma_id,
        order_by=func.random()).label("row_number")
).select_entity_from(unique_users_contents).subquery()

q = s.query(Content).select_entity_from(q).filter(q.c.row_number <= 4)

print q
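For reference, the same shape can be written in 1.4-style select() using aliased() against each subquery (a sketch, untested, reusing the Content model and the star imports above; passing columns to .distinct() renders DISTINCT ON on the PostgreSQL dialect):

distinct_q = select(Content).distinct(
    Content.grandma_id, Content.author_id).subquery("c")
c = aliased(Content, distinct_q)

windowed = select(
    c,
    func.row_number().over(
        partition_by=c.grandma_id,
        order_by=func.random()).label("row_number")
).subquery()

v1 = aliased(Content, windowed)
stmt = select(v1).where(windowed.c.row_number <= 4)
print(stmt)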

mysql Compress() with sqlalchemy

table:
id(integer primary key)
data(blob)
I use mysql and sqlalchemy.
To insert data I use:
o = Demo()
o.data = mydata
session.add(o)
session.commit()
I would like to insert into the table like this:
INSERT INTO table(data) VALUES(COMPRESS(mydata))
How can I do this using sqlalchemy?
You can assign a SQL function to the attribute:
from sqlalchemy import func

o.data = func.compress(mydata)
session.add(o)
session.commit()
Here's an example using a more DB-agnostic lower() function:
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class A(Base):
    __tablename__ = "a"
    id = Column(Integer, primary_key=True)
    data = Column(String)

e = create_engine('sqlite://', echo=True)
Base.metadata.create_all(e)

s = Session(e)

a1 = A()
a1.data = func.lower("SomeData")
s.add(a1)
s.commit()

assert a1.data == "somedata"
You can make it automatic with @validates:
from sqlalchemy.orm import validates

class MyClass(Base):
    # ...
    data = Column(BLOB)

    @validates("data")
    def _set_data(self, key, value):
        return func.compress(value)
If you want it readable in Python before the flush, you'd need to memoize it locally and use a descriptor to access it.
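A rough illustration of that idea (a hypothetical sketch; _raw_data and readable_data are made-up names, not part of any API):

class MyClass(Base):
    # ...
    data = Column(BLOB)

    @validates("data")
    def _set_data(self, key, value):
        self._raw_data = value           # memoized plain-Python copy, not persisted
        return func.compress(value)      # this expression is what gets flushed

    @property
    def readable_data(self):
        # readable before the flush; after a refresh, self.data holds the stored value
        return getattr(self, "_raw_data", self.data)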
