I often see that a model instance with a one-to-many relationship is explicitly extended after its initialization, such as:
one = One()
# some code goes here
one.many = [Many(), Many(), Many()]
one.many.append(Many())
But in my case I see it reasonable to initialize a many-to-one object with its relationship already supplied to __init__:
one = One()
many = Many(one = one)
Is it somehow considered a bad practice?
For a reproducible example, please consider the following code:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, ForeignKey, create_engine
from sqlalchemy.orm import relationship, sessionmaker
# Declarative base shared by the One/Many models below.
Base = declarative_base()

class One(Base):
    """'One' side of the pair; `many` is the collection of Many rows."""
    __tablename__ = 'one'
    id = Column(Integer, primary_key=True)
    # Kept in sync with Many.one via back_populates.
    many = relationship("Many", back_populates="one")
class Many(Base):
    """'Many' side; each row references a single One via one_id."""
    __tablename__ = 'many'
    id = Column(Integer, primary_key=True)
    one_id = Column(Integer, ForeignKey('one.id'))
    one = relationship("One", back_populates="many")

    def __init__(self, one=None):
        # Optionally wire the parent at construction time.  Assigning the
        # relationship also appends self to one.many (back_populates).
        if one is not None:
            self.one = one
# Setup the DB and connection
engine = create_engine('sqlite:///:memory:', echo=True)
# NOTE(review): conn is never used or closed in this example.
conn = engine.connect()
session = sessionmaker(bind=engine)()
Base.metadata.create_all(engine)

# Is it proper to initialize a model instance, with its relationship as an argument?
one1 = One()
many1 = Many(one1)
# Both sides are synchronized immediately by back_populates.
print(many1.one is one1) # True
print(one1.many[0] is many1) # True
Although it looks clean to me so far, this approach may result in ambiguous code:
# What happens here?
# Instance of many first initialized with one2a,
# then reassigned to one2b?
one2a = One()
many2 = Many(one=one2a)
print(many2.one is one2a) # True
# Placing many2 into one2b.many re-points many2.one at one2b, because
# back_populates keeps both sides of the relationship consistent.
one2b = One(many=[many2]) # same when One(many=[Many(one=one2a)])
print(many2 is one2b.many[0]) # True
print(many2.one is one2a) # False
print(many2.one is one2b) # True
Finally, please consider the aforementioned __init__ method. Since self.many is by default expected to be an empty list, what would be the desired way to initialize it as such?
def __init__(self, many=None):
    # NOTE(review): fragment from the question; the enclosing One class is
    # not shown.  SQLAlchemy initializes relationship collections to an
    # empty list automatically, so no explicit default is required here.
    if many is not None:
        self.many = many
Related
EDITED
I've been trying to create a class that will allow me to iterate through a list or dictionary, and generate tables and rows.
The code starts as follows:
from flask_sqlalchemy import SQLAlchemy as sa
from flask import Flask as fl
import pymysql

pymysql.install_as_MySQLdb()

app = fl(__name__)
# NOTE(review): credentials embedded in the URI; load from config/env in real code.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:the_other_stuff'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] =False
DEBUG =True
db = sa(app)

# Sample payload: one row will be inserted per element.
a=['some_stuff','gaaahhhh','lalala','rawr','pizza']
class test(db.Model):
    """Single-table model holding an id and a short unicode payload."""
    # NOTE(review): lowercase class name goes against PEP 8 (PascalCase).
    __tablename__ = 'stuff'
    id = db.Column('id', db.Integer, primary_key = True)
    data = db.Column('data', db.Unicode(50))

    def __init__(self, id, data):
        # 'id' shadows the builtin; kept as-is to preserve the interface.
        self.id = id
        self.data = data
def stuff():
    """Create the schema and insert one `test` row per element of `a`.

    Fixes over the original: db.create_all() was executed on every loop
    iteration (it is idempotent but wasteful DDL), and the manual index
    arithmetic (`i = i + 1; id = i`) re-implemented enumerate().  Row ids
    remain 1-based, matching the original behavior.

    :returns: result of db.session.commit() (None)
    """
    # Run the DDL once, before queuing the inserts.
    db.create_all()
    for row_id, data in enumerate(a, start=1):
        db.session.add(test(row_id, data))
    return db.session.commit()

stuff()
I'm still going to try and structure it so that it can take a dictionary, or list, and then add the key as the table name if it is a dict. If someone has that code, I won't argue with you sharing, but if not I'll post when that is done.
use db.session.add / db.session.commit. db object is initialized from flask_sqlalchemy. It is not db.Session.
The below code auto inserts data into a one-to-many relationship table set. The recursive function acts like a while loop (%timeit showed the same results for both); I just like the way it looks over a while loop. The function will index to a list in a list. I was going 3 deep but modified the loops to simplify it. This was originally designed to push a list like so: list[a][i][0]; each zero value inside every [i] was the same value type. I set it to only do [a][i] to keep it a little more simple, and so it could be used as a base if someone liked it. [a][i][0] was very fast, but [a][i] might be better off as a list of pd.DataFrame, instead of as np.array. If [i] is not going to the same db.Column() you'll have to declare one for each set of [i] and figure out a way of indexing through it.
A table generator involves a class generator with a base class. I don't have a working example, but can upload when I do
import numpy as np
from flask_sqlalchemy import SQLAlchemy as sa
from flask import Flask as fl
import pymysql

pymysql.install_as_MySQLdb()

app = fl(__name__)
# NOTE(review): credentials embedded in the URI; load from config/env in real code.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:the_other_stuff'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] =False
DEBUG =True
db = sa(app)

# NOTE(review): 'list' shadows the builtin; later code indexes it as list[a][i].
list=[np.array(np.ones(100)),np.array(np.ones(100)),np.array(np.ones(100)),
np.array(np.ones(100)),np.array(np.ones(100))]
class Parent(db.Model):
    """Parent row holding a short VARCHAR payload and its children."""
    id = db.Column('id', db.Integer, primary_key=True)
    data= db.Column('data', db.VARCHAR(45))
    # NOTE(review): the relationship target should be the class name 'Child',
    # not 'child'; also backref='parent' collides with the explicit
    # Child.parent relationship defined below — confirm intent.
    _child = db.relationship('child', backref='parent', lazy=True)

    def __init__(self, data):
        self.data = data

    def __repr__(self):
        return '<Parent %r>' % self.id
class Child(db.Model):
    """Child row referencing a Parent."""
    id = db.Column('id', db.Integer, primary_key = True)
    data = db.Column('data', db.VARCHAR(45))
    # NOTE(review): flask-sqlalchemy auto-derives the table name 'parent'
    # for Parent, so this should be db.ForeignKey('parent.id');
    # 'Parent.id' will not resolve to a table — confirm.
    parent_id = db.Column(db.Integer, db.ForeignKey('Parent.id'))
    # NOTE(review): redundant with (and conflicting against) the 'parent'
    # backref declared on Parent._child above.
    parent = db.relationship('Parent')

    def __init__(self,data):
        self.data = data

    def __repr__(self):
        return '<Child %r>' % self.id
def child_loop(i = 0):
    # NOTE(review): several names here are unresolved or misspelled; this
    # function cannot run as quoted:
    #   - 'data' is undefined (Parent(symbol_col=data[a]) will NameError);
    #   - Parent.__init__ takes 'data', not 'symbol_col';
    #   - 'child' should be 'Child', and 's.c.append(child)' references
    #     undefined 's' and 'c' (likely p._child.append(c) was intended);
    #   - 'exc' (sqlalchemy.exc) is never imported.
    # The tail-recursive "while loop" also never changes its condition:
    # 'a' is always len(list)-1 after the for loop, so the recursion only
    # stops via RecursionError — presumably 'i < len(list[0])' was meant.
    for a in range(len(list)):
        with db.session.no_autoflush:
            try:
                p = Parent(symbol_col=data[a])
                c = child(data = list[a][i])
                s.c.append(child)
                db.session.add(p)
                db.session.add(c)
            except exc.IntegrityError as e:
                db.session.rollback()
    i = i + 1
    if a < len(list[0]):
        child_loop(i = i)
    return print('inserted open_close')

child_loop(i=0)
Assume the following setup:
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()

class MyClass(Base):
    """Minimal model for the query-shorthand question."""
    # NOTE(review): declarative models require __tablename__ (or __table__);
    # as quoted, this class raises InvalidRequestError at definition time.
    id = Column(Integer, primary_key=True)
    name = Column(String)
The normal paradigm to query the DB with SQLAlchemy is to do the following:
Session = sessionmaker()
# NOTE(review): 'engine' here is a plain string; Session(bind=...) expects an
# Engine/Connection, e.g. create_engine('sqlite://').  Illustrative only.
engine = 'some_db_location_string'
session = Session(bind=engine)
# The conventional query form the question wants to shorten:
session.query(MyClass).filter(MyClass.id == 1).first()
Suppose, I want to simplify the query to the following:
MyClass(s).filter(MyClass.id == 1).first()
OR
MyClass(s).filter(id == 1).first()
How would I do that? My first attempt at that to use a model Mixin class failed. This is what I tried:
class ModelMixins(object):
    """Mixin binding a Session to a model instance for shorthand querying.

    Fixes over the original: the class statement was missing its colon
    (syntax error), filter() queried the *instance* rather than the mapped
    class, and it never returned the query, so nothing could be chained.
    """

    def __init__(self, session):
        # Store the session so instance-level query helpers can use it.
        self.session = session

    def filter(self, *args):
        # Query the mapped class (session.query expects a class, not an
        # instance) and return the Query so .first()/.all() can chain.
        return self.session.query(type(self)).filter(*args)
# Redefine MyClass to use the above class
class MyClass(ModelMixins, Base):
    # NOTE(review): still missing __tablename__; see the earlier definition.
    id = Column(Integer, primary_key=True)
    name = Column(String)
The main failure seems to be that I can't quite transfer the expression 'MyClass.id == 1' to the actual filter function that is part of the session object.
Folks may ask why would I want to do:
MyClass(s).filter(id == 1).first()
I have seen something similar to this used before and thought that the syntax would become so much cleaner if I could achieve this. I wanted to replicate it but have not been able to. Being able to do something like this:
def get_stuff(some_id):
    """Return the name of the MyClass row with the given id, or None.

    Fix over the original: the filter expression had a stray trailing
    quote (`MyClass.id== some_id'`), which is a syntax error.
    """
    with session_scope() as s:
        rec = MyClass(s).filter(MyClass.id == some_id).first()
        if rec:
            return rec.name
        else:
            return None
...seems to be the cleanest way of doing things. For one, session management is kept separate. Secondly, the query itself is simplified. Having a Mixin class like this would allow me to add the filter functionality to any number of classes...So can someone help in this regard?
session.query takes a class; you're giving it self, which is an instance. Replace your filter method with:
def filter(self, *args):
    """Build a Query for this instance's mapped class, filtered by *args*.

    Uses the session stored on the instance (ModelMixins.__init__ saves it
    as self.session) instead of relying on a module-global `session`, and
    returns the Query so calls can be chained (.first(), .all(), ...).
    """
    return self.session.query(self.__class__).filter(*args)
and at least this much works:
In [45]: MyClass(session).filter(MyClass.id==1)
Out[45]: <sqlalchemy.orm.query.Query at 0x10e0bbe80>
The generated SQL looks right, too (newlines added for clarity):
In [57]: str(MyClass(session).filter(MyClass.id==1))
Out[57]: 'SELECT "MyClass".id AS "MyClass_id", "MyClass".name AS "MyClass_name"
FROM "MyClass"
WHERE "MyClass".id = ?'
No guarantees there won't be oddities; I've never tried anything like this before.
I've been using this mixin to good success. It is most likely not the most efficient thing in the world, and I am no expert. I define a date_created column for every table.
class QueryBuilder:
    """
    Mixin that assembles a filtered query for the inheriting model class.
    """

    # When True, print the generated SQL for debugging.
    q_debug = False

    def query_from_dict(self, db_session: Session, **q_params: dict):
        """
        Creates a query from keyword parameters.

        :param db_session: The database session
        :type db_session: Session
        :param q_params: The query parameters: 'start_date'/'end_date'
            bound the date_created column, '<attr>_like' performs a
            case-insensitive substring match, anything else becomes an
            equality filter on the attribute of the same name.
        :type q_params: dictionary
        :returns: the assembled Query
        """
        q_base = db_session.query(type(self))
        for param, value in q_params.items():
            if param == 'start_date':
                q_base = q_base.filter(
                    type(self).__dict__.get('date_created') >= value
                )
            elif param == 'end_date':
                q_base = q_base.filter(
                    type(self).__dict__.get('date_created') <= value
                )
            elif 'like' in param:
                param = param.replace('_like', '')
                member = type(self).__dict__.get(param)
                # NOTE(review): unknown '<attr>_like' names are silently
                # ignored here rather than raising.
                if member:
                    q_base = q_base.filter(member.ilike(f'%{value}%'))
            else:
                q_base = q_base.filter(
                    type(self).__dict__.get(param) == value
                )
        if self.q_debug:
            print(q_base)
        return q_base
I have been playing around with SQLAlchemy and found out that I cannot track reliably what is being changed within database.
I have created an example that explains what my concern is:
import re
import datetime
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import (
declarative_base,
declared_attr,
)
from sqlalchemy import (
create_engine,
event,
Column,
Boolean,
Integer,
String,
Unicode,
DateTime,
Index,
ForeignKey,
CheckConstraint,
)
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
Session,
relationship,
backref,
)
import transaction
from zope.sqlalchemy import ZopeTransactionExtension
class ExtendedSession(Session):
    """Session subclass carrying a spare attribute for application use."""
    # Placeholder; set per-session by application code.
    my_var = None

# Thread-local session factory integrated with Zope transactions.
DBSession = scoped_session(
    sessionmaker(extension=ZopeTransactionExtension(),
                 class_=ExtendedSession
                 )
)
class BaseModel(object):
    """Declarative mixin: query property, integer primary key, and an
    auto-generated snake_case table name for every model."""

    query = DBSession.query_property()

    id = Column(
        Integer,
        primary_key=True,
    )

    # The original showed '#declared_attr' — a markdown-mangled
    # '@declared_attr'.  Without the decorator, __tablename__ is a plain
    # method and declarative mapping fails.
    @declared_attr
    def __tablename__(cls):
        # CamelCase class name -> snake_case table name,
        # e.g. "MyModel" -> "my_model".
        return re.sub(r"([A-Z])", r"_\1", cls.__name__).lower()[1:]
Base = declarative_base(cls=BaseModel)

def initialize_sql(engine):
    """Bind the scoped session and the declarative metadata to `engine`."""
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

engine = create_engine("sqlite://")
initialize_sql(engine)
class Parent(Base):
    """Parent entity; children attach via the Child.parent backref."""
    # *** Columns
    col1 = Column (
        String,
        nullable=False,
    )
    # *** Relationships
    # *** Methods
    def __repr__(self):
        # NOTE(review): the trailing backslashes are unnecessary inside
        # parentheses; kept as quoted.
        return "<Parent(id: '{0}', col1: '{1}')>".format(
            self.id,\
            self.col1,\
        )
class Child(Base):
    """Child entity linked to Parent; deletes cascade from the parent."""
    # *** Columns
    col1 = Column (
        String,
        nullable=False,
    )
    parent_id = Column (
        Integer,
        ForeignKey (
            Parent.id,
            ondelete="CASCADE",
        ),
        nullable=False,
    )
    # *** Relationships
    parent = relationship (
        Parent,
        backref=backref(
            "child_elements",
            uselist=True,
            cascade="save-update, delete",
            lazy="dynamic",
        ),
        # If below is uncommented then instance of Parent won't appear in session.dirty
        # However this relationship will never be loaded (even if needed)
        #lazy="noload",
    )
    # *** Methods
    def __repr__(self):
        return "<Child(id: '{0}', col1: '{1}', parent_id: '{2}')>".format(
            self.id,\
            self.col1,\
            self.parent_id,\
        )
#event.listens_for(DBSession, 'before_flush')
# NOTE(review): the '#' above is a markdown-mangled '@' decorator.
def before_flush(session, flush_context, instances):
    """Log every object pending INSERT/UPDATE/DELETE before a flush.

    Fixes over the original: the `if session.new:` style guards were
    redundant (iterating an empty collection is a no-op) and the
    `time_stamp` local was computed but never used.
    """
    for elem in session.new:
        print(" ### NEW {0}".format(repr(elem)))
    for elem in session.dirty:
        print(" ### DIRTY {0}".format(repr(elem)))
    for elem in session.deleted:
        print(" ### DELETED {0}".format(repr(elem)))
Base.metadata.drop_all(engine)
Base.metadata.create_all(engine)
with transaction.manager:
    parent = Parent(col1="parent")
    DBSession.add(parent)
    DBSession.flush()
    # Below loop is to demonstrate that
    # each time child object is created and linked to parent
    # parent is also marked as modified
    # how to avoid that?
    # or optionally is it possible to detect this in before_flush event
    # without issuing additional SQL query?
    for i in range(0, 10):
        parent=Parent.query.filter(Parent.col1 == "parent").first()
        child = Child(col1="{0}".format(i))
        child.parent = parent
        DBSession.add(child)
        DBSession.flush()
    # Below update will not cause associated instance of Parent appearing in session.dirty
    child = Child.query.filter(Child.col1=="3").first()
    child.col1="updated"
    DBSession.add(child)
    DBSession.flush()
In short - there are two objects:
Parent
Child - linked to Parent
Each time I add new instance of Child and link it with instance of Parent that instance of Parent also appears within session.dirty of before_flush event.
The SQLAlchemy community advised that this behavior is expected (although I think there must be an option to change the default behavior - I could not find it within the docs)
So here is my question: is it possible to configure relationship such way that when I add a new instance of Child and link it to instance of Parent then that instance of Parent won't appear within session.dirty?
I have tried setting relationship as lazy="noload" and it is not an option since I may need to use that relationship (so I may need to load it)
I would also accept a solution that would allow me to detect that Parent has not been changed within the before_flush event handler - however I do not want to trigger an additional query to achieve this.
I would appreciate your help,
Greg
After hours of research and a hint from SQLAlchemy community I found solution that seems to work the way I need (notice additional condition within session.dirty block).
#event.listens_for(DBSession, 'before_flush')
def before_flush(session, flush_context, instances):
    """Report pending flush activity, skipping objects that are "dirty"
    only because a multivalued collection on them changed."""
    time_stamp = datetime.datetime.utcnow()
    for pending in session.new:
        print(" ### NEW {0}".format(repr(pending)))
    for candidate in session.dirty:
        # Collection-only changes (e.g. a child appended to a backref
        # list) are filtered out by include_collections=False.
        if not session.is_modified(candidate, include_collections=False):
            continue
        print(" ### DIRTY {0}".format(repr(candidate)))
    for doomed in session.deleted:
        print(" ### DELETED {0}".format(repr(doomed)))
The documentation related to my solution can be found here: http://docs.sqlalchemy.org/en/latest/orm/session_api.html#sqlalchemy.orm.session.Session.is_modified
In short - specifying include_collections=False within session.is_modified makes SQLAlchemy to ignore situations where multivalued collections have been changed (in my case if child was changed then parent would be filtered out by that additional check).
I'm trying to model an entity that has one or more one-to-many relationships, such that its last_modified attribute is updated, when
a child is added or removed
a child is modified
the entity itself is modified
I've put together the following minimal example:
class Config(Base):
    """Parent entity whose last_modified should track its own and its
    children's changes."""
    __tablename__ = 'config'
    ID = Column('ID', Integer, primary_key=True)
    name = Column('name', String)
    # 'now' is presumably a datetime-returning callable defined elsewhere
    # in the file — TODO confirm.
    last_modified = Column('last_modified', DateTime, default=now, onupdate=now)
    params = relationship('ConfigParam', backref='config')

class ConfigParam(Base):
    """Key/value child row belonging to a Config."""
    __tablename__ = 'config_params'
    ID = Column('ID', Integer, primary_key=True)
    ConfigID = Column('ConfigID', Integer, ForeignKey('config.ID'), nullable=False)
    key = Column('key', String)
    value = Column('value', Float)
#event.listens_for(Config.params, 'append')
#event.listens_for(Config.params, 'remove')
# NOTE(review): the '#' lines above are markdown-mangled '@' decorators.
def receive_append_or_remove(target, value, initiator):
    # Touch the parent whenever a child is appended to or removed from
    # Config.params.
    target.last_modified = now()

#event.listens_for(ConfigParam.key, 'set')
#event.listens_for(ConfigParam.value, 'set')
# NOTE(review): the '#' lines above are markdown-mangled '@' decorators.
def receive_attr_change(target, value, oldvalue, initiator):
    # Touch the parent Config when a tracked child attribute changes.
    if target.config:
        # don't act if the parent config isn't yet set
        # i.e. during __init__
        target.config.last_modified = now()
This seems to work, but I'm wondering if there's a better way to do this?
Specifically, this becomes very verbose since my actual ConfigParam implementation has more attributes and I'm having multiple one-to-many relations configured on the parent Config class.
Take this with a huge grain of salt, it "seems" to work, could explode:
def rel_listener(t, v, i):
    # Collection event handler: touch the owner's last_modified.
    t.last_modified = now()

def listener(t, v, o, i):
    # Attribute 'set' handler: touch the parent Config, if already linked.
    if t.config:
        t.config.last_modified = now()

from sqlalchemy import inspect

# Bind the listeners to every relationship of Config and every column of
# ConfigParam.  NOTE(review): as the surrounding text says, this also
# binds to 'ID' and 'ConfigID' — the inspections make no exceptions.
for rel in inspect(Config).relationships:
    event.listen(rel, 'append', rel_listener)
    event.listen(rel, 'remove', rel_listener)

for col in inspect(ConfigParam).column_attrs:
    event.listen(col, 'set', listener)
Problem is that the inspections make no exceptions and columns such as 'ID' and 'ConfigID' will be bound to event listeners.
Another perhaps slightly less tedious form would be to just use a list of attributes to bind events to in a similar fashion:
# Explicit attribute list: only these columns trigger the listener.
for attr in ['key', 'value']:
    event.listen(getattr(ConfigParam, attr), 'set', listener)
This gives you control over what is bound to events and what is not.
I made this statement using flask-sqlalchemy and I've chosen to keep it in its original form. Post.query is equivalent to session.query(Post)
I attempted to make a subquery that would filter out all posts in a database which are in the draft state and not made or modified by the current user. I made this query,
# NOTE(review): as quoted, this expression is missing two closing parens —
# the ')))' at the end closes get_id() and or_(), leaving and_( and
# filter( open.  Kept verbatim; the surrounding text explains the real bug
# (wrong constants dictionary), not this truncation.
Post.query\
    .filter(sqlalchemy.and_(
        Post.post_status != Consts.PostStatuses["Draft"],
        sqlalchemy.or_(
            Post.modified_by_id == current_user.get_id(),
            Post.created_by_id == current_user.get_id()))
which created:
Where true AND ("Post".modified_by_id = :modified_by_id_1 OR
"Post".created_by_id = :created_by_id_1)
Expected outcome:
Where "Post".post_status != "Draft" AND (
"Post".modified_by_id = :modified_by_id_1 OR
"Post".created_by_id = :created_by_id_1)
I'm wondering, why this is happening? How can I increase the error level in SQLAlchemy? I think my project is silently failing and I would like to confirm my guess.
Update:
I used the wrong constants dictionary. One dictionary contains ints, the other contains strings (one for data base queries, one for printing).
# Stored as a small integer keyed into the Consts.post_status dictionary.
_post_status = db.Column(
    db.SmallInteger,
    default=Consts.post_status["Draft"])

#property
# NOTE(review): '#property' above is a markdown-mangled '@property'.
def post_status(self):
    # Map the stored integer onto its display string; None if unset/unknown.
    return Consts.post_status.get(getattr(self, "_post_status", None))
the problem is that your post_status property isn't acceptable for usage in an ORM level query, as this is a python descriptor which at the class level by default returns itself:
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()

class A(Base):
    __tablename__ = 'a'
    id = Column(Integer, primary_key=True)
    _post_status = Column(String)
    #property
    # NOTE(review): '#property' above is a markdown-mangled '@property'.
    def post_status(self):
        return self._post_status

# At class level a plain @property returns the property object itself, so
# the comparison below is 'property-object != 5678', which is just True —
# useless in a SQL filter.
print (A.post_status)
print (A.post_status != 5678)
output:
$ python test.py
<property object at 0x10165bd08>
True
the type of usage you're looking for seems like that of a hybrid attribute, which is a SQLAlchemy-included extension to a "regular" python descriptor which produces class-level behavior that's compatible with core SQL expressions:
from sqlalchemy.ext.hybrid import hybrid_property

class A(Base):
    __tablename__ = 'a'
    id = Column(Integer, primary_key=True)
    _post_status = Column(String)
    #hybrid_property
    # NOTE(review): '#hybrid_property' above is a markdown-mangled
    # '@hybrid_property' decorator.
    def post_status(self):
        return self._post_status

# With hybrid_property, class-level access yields a SQL expression
# (a._post_status != :param) usable in ORM queries.
print (A.post_status)
print (A.post_status != 5678)
output:
$ python test.py
A._post_status
a._post_status != :_post_status_1
be sure to read the hybrid doc carefully including how to establish the correct SQL expression behavior, descriptors that work both at the instance and class level is a somewhat advanced Python technique.