SQLAlchemy: declaring two mutually dependent classes (Python)

I have a problem in the file where I declare all my mapped classes.
class Application(AbstractId):
    .........
    key_event_id = ORM.column_property(
        SA.select([ApplicationEvent.id],
                  correlate=True,
                  from_obj=[Application.__table__.join(ApplicationEvent.__table__)]
        ).as_scalar().label("tag").where(ApplicationEvent.key_event == 1)
    )

SA.select([ApplicationEvent]).filter(ApplicationEvent.key_event)

class ApplicationEvent(AbstractId):
    __tablename__ = 'applications_events'
    application_id = SA.Column(SA.Integer, SA.ForeignKey(Application.id), primary_key=True)
    application = ORM.relationship(Application, backref='events')
    event_id = SA.Column(SA.Integer, SA.ForeignKey(Event.id), primary_key=True)
    event = ORM.relationship(Event)
This won't work, since ApplicationEvent is not yet declared at the point where Application references it. How can I make this work? I need key_event_id as a column of Application.
This won't work either:
@declarative.declared_attr
def key_event_id(cls):
    return ORM.column_property(
        SA.select(['ApplicationEvent.id'],
                  correlate=True,
                  from_obj=['Application.__table__'.join('ApplicationEvent.__table__')]
        ).as_scalar().where('ApplicationEvent.key_event' == 1).label("key_event_id")
    )

You can simply pass the model name as a string to the relationship() call. From the documentation for its argument parameter:

argument — a mapped class, or actual Mapper instance, representing the target of the relationship. argument may also be passed as a callable function which is evaluated at mapper initialization time, and may be passed as a Python-evaluable string when using Declarative.

You can do

application = ORM.relationship("Application", backref='events')

and

event = ORM.relationship("Event", order_by="Event.id")

The foreign keys can be written with strings as well; note that ForeignKey takes a "tablename.column" string rather than a class name (the table names below are assumptions based on the question):

application_id = SA.Column(SA.Integer, SA.ForeignKey("applications.id"), primary_key=True)
application = ORM.relationship("Application", backref='events')
event_id = SA.Column(SA.Integer, SA.ForeignKey("events.id"), primary_key=True)
event = ORM.relationship("Event")
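
A sketch of another way around the ordering problem for key_event_id itself: declare both classes without the property, then attach the column_property to Application afterwards, once ApplicationEvent exists. This reuses the question's SA/ORM aliases and assumes the application_id and key_event columns shown in the question:

# After both classes are declared, attach the property; declarative
# intercepts the assignment and adds it to the existing Application mapper.
Application.key_event_id = ORM.column_property(
    SA.select([ApplicationEvent.id])
    .where(ApplicationEvent.application_id == Application.id)
    .where(ApplicationEvent.key_event == 1)
    .as_scalar()
    .label("key_event_id")
)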

Related

Setting default for QuerySelectField as part of FieldList with Flask-WTF

In a Flask-WTF form, I can't seem to set the default of a QuerySelectField when it's embedded in a FieldList.
(I'm passing the instance from the database that I want to be the default, rather than a scalar.)
The code is as follows:
Model:
class Jobs_crewing_roles(db.Model):
    __tablename__ = 'jobs_crewing_roles'
    sort_by = 'roles_description'
    roles_abbreviation = db.Column(db.String(1), primary_key=True, nullable=False)
    roles_description = db.Column(db.Text)
    roles_short_description = db.Column(db.String(10))
    roles_crewing = db.relationship('Jobs_crewing', backref='role')
Form:
def CrewingRoles():
    return lambda: Jobs_crewing_roles.query.all()

class CrewingRoleEntryForm(FlaskForm):
    role = QuerySelectField('Role', query_factory=CrewingRoles(), get_label='roles_description')

class ReviewFlightForm(FlaskForm):
    t_job_crewing_roles = FieldList(FormField(CrewingRoleEntryForm), min_entries=0)
    t_job_review_submit = SubmitField('Save Changes')
Helper function:
job.review_form = ReviewFlightForm()
roles_items = []
for p in ast.literal_eval(form.t_job_crewing_roles_dict.data):
    select_entry = CrewingRoleEntryForm()
    select_entry.role.id = "role_" + str(p['id'])
    select_entry.role.label = p['id']
    select_entry.role.default = Jobs_crewing_roles.query.filter(
        Jobs_crewing_roles.roles_abbreviation == p['role']).first()
    roles_items.append(select_entry)
job.review_form.t_jobs_crewing_roles = roles_items
(form.t_job_crewing_roles_dict.data is passed from a previous form, a stringified list of dictionaries in the form {'id': (person id), 'role': (role abbreviation in Jobs_crewing_roles table)})
The rest of the helper function is working, as I get the correct id and label, but the QuerySelectField default isn't being set no matter what I try!
Edited to add:
The Flask debugger seems to think it's been set correctly, but it's not being selected in the form HTML:

>>> review_form.t_jobs_crewing_roles[0].role.default
<Job Crewing Roles Record: Engineer>
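
For what it's worth, in WTForms a field's default is only consulted while the form is processed (at construction / process() time), so assigning .default to an already-instantiated field changes the attribute the debugger inspects without ever affecting rendering. A sketch of the usual workaround is to set .data instead, reusing the names from the question:

select_entry = CrewingRoleEntryForm()
# QuerySelectField marks the option matching .data as selected at render
# time, so set data rather than default on the already-built field.
select_entry.role.data = Jobs_crewing_roles.query.filter(
    Jobs_crewing_roles.roles_abbreviation == p['role']).first()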

How can I increment a sequence only once on a bulk insert?

Given a model like this:
class MyModel(Base):
    __tablename__ = 'my_model'

    id = Column(Integer, nullable=False, primary_key=True, index=True)
    value = Column(Numeric, doc='value')
    batch = Column(Integer, Sequence('my_model_batch_seq'), doc='Batch ID of the update')
I want to issue a batch insert that adds all the new objects with the same batch ID. The code below increments the sequence for each object, which is not what I'm looking for.
objects = [
    MyModel(
        value=x,
    ) for x in range(10)
]
db.bulk_save_objects(objects)
If I've understood you correctly, you could first select the next value explicitly:
from sqlalchemy import func

# Note that this may fail if you haven't configured a bind on
# your Session.
batch = db.query(func.nextval('my_model_batch_seq')).scalar()
and then just pass it along:
objects = [
    MyModel(
        value=x,
        batch=batch,
    ) for x in range(10)
]
db.bulk_save_objects(objects)
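
A sketch of a more backend-portable variant of the same idea uses Sequence.next_value() instead of calling nextval() (a PostgreSQL function) through func, with the sequence name assumed from the model above:

from sqlalchemy import Sequence, select

# next_value() renders the dialect-appropriate "next value" expression,
# e.g. SELECT nextval('my_model_batch_seq') on PostgreSQL.
batch = db.execute(
    select([Sequence('my_model_batch_seq').next_value()])
).scalar()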

SQLAlchemy audit logging; how to handle deletes?

I'm using a modified version of the versioning code example that comes with SQLAlchemy to record a user id and date on changes. However, I also want to modify it so that deletes are done by marking an is_deleted-style flag instead of running an actual SQL DELETE. My problem is that I'm not sure how to capture the delete and replace it with an update.
Here's what I have so far:
''' http://docs.sqlalchemy.org/en/rel_0_8/orm/examples.html?highlight=versioning#versioned-objects '''
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper, scoping
from sqlalchemy.orm.session import Session
from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
from sqlalchemy import Table, Column, ForeignKeyConstraint, DateTime, String, Boolean
from sqlalchemy import event
from sqlalchemy.orm.properties import RelationshipProperty
from datetime import datetime
from sqlalchemy.schema import ForeignKey
from sqlalchemy.sql.expression import false

def col_references_table(col, table):
    for fk in col.foreign_keys:
        if fk.references(table):
            return True
    return False
def _history_mapper(local_mapper):
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []

    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        for column in local_mapper.local_table.c:
            if column.name.startswith('version_'):
                continue
            col = column.copy()
            col.unique = False
            if super_mapper and col_references_table(column, super_mapper.local_table):
                super_fks.append((col.key, list(super_history_mapper.local_table.primary_key)[0]))
            cols.append(col)
            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

        if super_mapper:
            super_fks.append(('version_datetime', super_history_mapper.base_mapper.local_table.c.version_datetime))
            super_fks.append(('version_userid', super_history_mapper.base_mapper.local_table.c.version_userid))
            super_fks.append(('version_deleted', super_history_mapper.base_mapper.local_table.c.version_deleted))
            cols.append(Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=True, info={'colanderalchemy': {'exclude': True}}))
            cols.append(Column('version_userid', String(60), ForeignKey("user.login"), nullable=True, info={'colanderalchemy': {'exclude': True}}))
            cols.append(Column('version_deleted', Boolean, server_default=false(), nullable=False, info={'colanderalchemy': {'exclude': True}}))
        else:
            cols.append(Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=True, info={'colanderalchemy': {'exclude': True}}))
            cols.append(Column('version_userid', String(60), ForeignKey("user.login"), nullable=True, info={'colanderalchemy': {'exclude': True}}))
            cols.append(Column('version_deleted', Boolean, server_default=false(), nullable=False, info={'colanderalchemy': {'exclude': True}}))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + '_history',
                      local_mapper.local_table.metadata,
                      *cols)
    else:
        # single table inheritance. take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = column.copy()
                col.unique = False
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_,)
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(
        versioned_cls,
        table,
        inherits=super_history_mapper,
        polymorphic_on=polymorphic_on,
        polymorphic_identity=local_mapper.polymorphic_identity
    )
    cls.__history_mapper__ = m

    if not super_history_mapper:
        local_mapper.local_table.append_column(
            Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=False, info={'colanderalchemy': {'exclude': True}})
        )
        local_mapper.add_property("version_datetime", local_mapper.local_table.c.version_datetime)
        local_mapper.local_table.append_column(
            Column('version_userid', String(60), ForeignKey("user.login"), nullable=True, info={'colanderalchemy': {'exclude': True}})
        )
        local_mapper.add_property("version_userid", local_mapper.local_table.c.version_userid)
        local_mapper.local_table.append_column(
            Column('version_deleted', Boolean, server_default=false(), nullable=False, info={'colanderalchemy': {'exclude': True}})
        )
        local_mapper.add_property("version_deleted", local_mapper.local_table.c.version_deleted)
class Versioned(object):
    @declared_attr
    def __mapper_cls__(cls):
        def map(cls, *arg, **kw):
            mp = mapper(cls, *arg, **kw)
            _history_mapper(mp)
            return mp
        return map

def versioned_objects(iter):
    for obj in iter:
        if hasattr(obj, '__history_mapper__'):
            yield obj
def create_version(obj, session, deleted=False):
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_

    obj_state = attributes.instance_state(obj)

    attr = {}
    obj_changed = False

    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
        if hm.single:
            continue

        for hist_col in hm.local_table.c:
            if hist_col.key.startswith('version_'):
                continue

            obj_col = om.local_table.c[hist_col.key]

            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column. this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue

            # expired object attributes and also deferred cols might not be in the
            # dict. force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            a, u, d = attributes.get_history(obj, prop.key)

            if d:
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True

    if not obj_changed:
        # not changed, but we have relationships. OK
        # check those too
        for prop in obj_mapper.iterate_properties:
            if isinstance(prop, RelationshipProperty) and \
                    attributes.get_history(obj, prop.key).has_changes():
                obj_changed = True
                break

    if not obj_changed and not deleted:
        return

    attr['version_datetime'] = obj.version_datetime
    attr['version_userid'] = obj.version_userid
    attr['version_deleted'] = obj.version_deleted
    hist = history_cls()
    for key, value in attr.items():
        setattr(hist, key, value)
    session.add(hist)
    obj.version_datetime = datetime.now()
    obj.version_userid = getattr(session, 'userid', None)
    obj.version_deleted = deleted
def versioned_session(session):
    @event.listens_for(session, 'before_flush')
    def before_flush(session, flush_context, instances):
        for obj in versioned_objects(session.deleted):
            create_version(obj, session, deleted=True)
        for obj in versioned_objects(session.dirty):
            create_version(obj, session)

def add_userid_to_session(userid, session):
    if isinstance(session, scoping.scoped_session):
        thread_local_session = session.registry()
        thread_local_session.userid = userid
    elif isinstance(session, Session):
        session.userid = userid
    else:
        raise TypeError("Not sure how to add the userid into session of type {}".format(type(session)))
And here's how I'm using it (all non-essential parts have been cut out):
Base = declarative_base()

class User(Versioned, Base):
    __tablename__ = 'user'
    login = Column(String(60), primary_key=True, nullable=False)
    groups = association_proxy('user_to_groups', 'group',
                               creator=lambda group: UserToGroup(group_name=group.name))

    def __init__(self, login, groups=None):
        self.login = login
        if groups:
            for group in groups:
                self.groups.append(group)

class Group(Versioned, Base):
    __tablename__ = 'group'
    name = Column(String(100), primary_key=True, nullable=False)
    description = Column(String(100), nullable=True)
    users = association_proxy('group_to_user', 'user',
                              creator=lambda user: UserToGroup(user_login=user.login))

    def __eq__(self, other):
        return self.name == other.name

class UserToGroup(Versioned, Base):
    __tablename__ = 'user_to_group'
    user_login = Column(String(60), ForeignKey(User.login), primary_key=True)
    group_name = Column(String(100), ForeignKey(Group.name), primary_key=True)
    user = relationship(User, backref=backref('user_to_groups', cascade='all, delete-orphan'))
    group = relationship(Group, backref=backref('group_to_user', cascade='all, delete-orphan'))

session.configure(bind=engine)
add_userid_to_session("test", session.registry())
versioned_session(session)

user = session.query(User).filter(User.login == 'test').one()
user.groups.remove(Group(name="g:admin"))
Before running that code, the database has one user, called 'test', and two groups the user is attached to, called 'g:admin' and 'g:superadmin'.
What it currently does is copy the existing user_to_group entry for the 'test' => 'g:admin' mapping to the history table, and then delete the entry from user_to_group.
What I'd like it to do is copy the value to the history table and then update the entry in user_to_group to have version_deleted set to true.
I'm thinking the way to do that is to snatch the entry out of session.deleted (that's why I changed the order from the original code), modify it, and put it into session.dirty. I'm just not sure what the "safest" way of doing this is.
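One commonly cited pattern for exactly this, sketched here untested against the code above: calling Session.add() on an object that is pending deletion inside before_flush cancels the DELETE and returns the object to the persistent state, so the flag change is flushed as an UPDATE instead:

def versioned_session(session):
    @event.listens_for(session, 'before_flush')
    def before_flush(session, flush_context, instances):
        seen = set()
        # iterate over a copy, since session.add() mutates session.deleted
        for obj in list(versioned_objects(session.deleted)):
            create_version(obj, session, deleted=True)  # also sets version_deleted
            session.add(obj)  # cancel the DELETE; the row flushes as an UPDATE
            seen.add(id(obj))
        for obj in versioned_objects(session.dirty):
            if id(obj) not in seen:  # don't version the soft-deleted rows twice
                create_version(obj, session)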
Another issue (which will likely require another question) is how to detect relationships which are covered in another table, as currently the system copies the 'user' row into the history table and then updates the version information despite no real changes being made to the row.
EDIT: I've decided to do things a bit differently, but I still have a problem... Instead of having a "deleted" flag in the live tables, I actually delete the content and record another history item indicating when the deletion occurred. If I'm deleting an object directly, this works correctly. If I delete an object off of a relationship, I'm not able to do it properly: a DELETE gets issued to the relationship table to remove the link, but I can't seem to figure out how to detect that deletion in the create_version method.
For example, if I do:
group = session.query(Group).filter(Group.name == 'g:admin').one()
group.users.remove(group.users[0])
No objects are placed in session.deleted. I can detect some sort of deletion via attributes.get_history(obj, prop.key), but it seems to indicate a deletion of a UserToGroup object from Group (which I want to detect and record a history item for), and then also a deletion of a Group from the UserToGroup object (which I don't want to act on, because the Group itself is not being deleted).
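
One way to tell those two directions apart (a sketch against the public relationship API, untested with these mappers) is to treat only history on the one-to-many side as a child being removed, and ignore the mirrored many-to-one change that the child reports:

from sqlalchemy.orm import attributes
from sqlalchemy.orm.interfaces import ONETOMANY
from sqlalchemy.orm.properties import RelationshipProperty

def removed_children(obj, obj_mapper):
    # Yield objects removed from obj's one-to-many collections.
    for prop in obj_mapper.iterate_properties:
        if not isinstance(prop, RelationshipProperty):
            continue
        # Group.group_to_user is ONETOMANY; UserToGroup.group is MANYTOONE.
        # Only the former indicates a row being removed from a collection.
        if prop.direction is not ONETOMANY:
            continue
        for child in attributes.get_history(obj, prop.key).deleted:
            yield child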

SQLAlchemy: Dynamically loading tables from a list

I am trying to create a program that loads over 100 tables from a database so that I can change all occurrences of a user's user ID.
Rather than map all of the tables individually, I decided to use a loop to map each of them from an array of objects. This way, the table definitions can be stored in a config file and updated later.
Here is my code so far:
def init_model(engine):
    """Call me before using any of the tables or classes in the model"""
    meta.Session.configure(bind=engine)
    meta.engine = engine

class Table:
    tableID = ''
    primaryKey = ''
    pkType = sa.types.String()

    class mappedClass(object):
        pass

WIW_TBL = Table()
LOCATIONS_TBL = Table()

WIW_TBL.tableID = "wiw_tbl"
WIW_TBL.primaryKey = "PORTAL_USERID"
WIW_TBL.pkType = sa.types.String()

LOCATIONS_TBL.tableID = "locations_tbl"
LOCATIONS_TBL.primaryKey = "LOCATION_CODE"
LOCATIONS_TBL.pkType = sa.types.Integer()

tableList = [WIW_TBL, LOCATIONS_TBL]

for i in tableList:
    i.tableID = sa.Table(i.tableID.upper(), meta.metadata,
                         sa.Column(i.primaryKey, i.pkType, primary_key=True),
                         autoload=True,
                         autoload_with=engine)
    orm.mapper(i.mappedClass, i.tableID)
The error that this code returns is:
sqlalchemy.exc.ArgumentError: Class '<class 'changeofname.model.mappedClass'>' already has a primary mapper defined. Use non_primary=True to create a non primary Mapper. clear_mappers() will remove *all* current mappers from all classes.
I can't use clear_mappers as it wipes all of the classes, and the entity_name scheme doesn't seem to apply here.
It seems that every object wants to use the same class, although each should have its own instance of it.
Does anyone have any ideas?
Well, in your case it *is* the same class that you are trying to map to different tables. To solve this, create a class dynamically for each Table:
class Table(object):
    tableID = ''
    primaryKey = ''
    pkType = sa.types.String()

    def __init__(self):
        self.mappedClass = type('TempClass', (object,), {})
But I would prefer a slightly cleaner version:
class Table2(object):
    def __init__(self, table_id, pk_name, pk_type):
        self.tableID = table_id
        self.primaryKey = pk_name
        self.pkType = pk_type
        self.mappedClass = type('Class_' + self.tableID, (object,), {})

# ...
WIW_TBL = Table2("wiw_tbl", "PORTAL_USERID", sa.types.String())
LOCATIONS_TBL = Table2("locations_tbl", "LOCATION_CODE", sa.types.Integer())
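
Either way, the reflection loop from the question should then work unchanged, because each Table2 instance now carries its own class object:

for i in (WIW_TBL, LOCATIONS_TBL):
    i.tableID = sa.Table(i.tableID.upper(), meta.metadata,
                         sa.Column(i.primaryKey, i.pkType, primary_key=True),
                         autoload=True, autoload_with=engine)
    orm.mapper(i.mappedClass, i.tableID)  # a distinct class per table now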

Error with dynamic classes and sqlalchemy

I am trying to write a logging system which uses dynamic classes to create tables. Creating the classes and the tables seems to work fine, but trying to put entries into them leads to an error message regarding mapping; below are the sample code and the error message.
Base = declarative_base()

# my init function
def tableinit(self, keyargs):
    self.__dict__ = dict(keyargs)

# table creation
tableName = "newTable"
columnsDict["__tablename__"] = tableName
columnsDict["__init__"] = tableinit
columnsDict["id"] = Column("id", Integer, autoincrement=True, nullable=False, primary_key=True)
columnsDict["pid"] = Column("pid", Integer, ForeignKey('someparenttable.id'))  # someparenttable is created with a hard-coded class
newTable = type(tableName, (Base,), columnsDict)
tableClassDict[tableName] = newTable

# when doing an entry
newClassInst = subEntryClassDict[tableName]
newEntry = newClassInst(dataDict)
entryList.append(newEntry)  # called in a for loop, together with the entries for someparenttable
self.session.add_all(entryList)  # at this point the error occurs
The error:
UnmappedInstanceError: Class 'newTable' is mapped, but this instance lacks instrumentation. This occurs when the instance is created before sqlalchemy.orm.mapper(module.newTable) was called.
This is easier if you create a function to return a class that you set up normally. I've tried something like this and it works:
def getNewTable(db, table):
    class NewTable(Base):
        __tablename__ = table
        __table_args__ = {'schema': db}
        id = Column( ...
    return NewTable

newClassInst = getNewTable('somedb', 'sometable')
newRow = newClassInst(data)
As the error description says, this problem is caused by the instance lacking the instrumentation the ORM installs, and I think it is actually triggered by self.__dict__ = dict(keyargs): assigning a brand-new __dict__ throws away the state SQLAlchemy stored there.
So it can be solved by rewriting the init so that it does not clobber what the ORM injected.
Turn this:

# my init function
def tableinit(self, keyargs):
    self.__dict__ = dict(keyargs)

into this:

# my init function
def tableinit(self, **kwargs):
    self.__dict__.update(kwargs)
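
If the only job of tableinit is to copy keyword arguments onto the instance, note that the declarative base's default constructor already does exactly that, so you could leave __init__ out of columnsDict entirely (a sketch reusing the question's names):

newClassInst = tableClassDict[tableName]
# The declarative default __init__ assigns each keyword argument to the
# matching mapped attribute and leaves the ORM instrumentation intact.
newEntry = newClassInst(**dataDict)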
