I'm trying to figure out how to structure my program, which is a simple trading bot.

class "Exchange"
    should store instances of the class "Symbol"
class "Symbol"
    stores all symbol-related stuff
class "Websocket"
    gets the ws stream and stores the ticks in a DataFrame in the Symbol instance,
    located in Exchange.symbols[symbol_name].history
class "Indicators"
    calculates, for example, a moving average and stores the values in
    Exchange.symbols[symbol_name].history
Here are my questions:

To access Exchange.symbols I need a class variable so I can read/edit it from within other class instances. In Websocket.handle_symbol_ticker I have to write Exchange.symbols[self.symbol_name].history. Could this be done in a shorter manner? I tried history_path = Exchange.symbols[self.symbol_name].history, but this generates a new object...

In Indicators.calc_ma I could not use loc[-1, column_name] but had to use .index[-1]. What would be the best way to index the last row?
Here is the code:
import pandas as pd


class Exchange:
    symbols = {}


class Symbol:
    def __init__(self, base_asset, quote_asset):
        self.base_asset = base_asset
        self.quote_asset = quote_asset
        self.symbol_name = self.base_asset + self.quote_asset
        self.history = pd.DataFrame()


class Websocket(Exchange):
    def __init__(self, symbol_name):
        self.symbol_name = symbol_name
        history_path = Exchange.symbols[self.symbol_name].history  # doesn't work

    def handle_symbol_ticker(self, msg: dict):
        Exchange.symbols[self.symbol_name].history = pd.concat([
            Exchange.symbols[self.symbol_name].history,
            pd.DataFrame([msg])
        ]).set_index("event_time")

    # def handle_symbol_ticker(self, msg: dict):
    #     history_path = pd.concat([  # <- doesn't work
    #         history_path,
    #         pd.DataFrame([msg])
    #     ]).set_index("event_time")


class Indicators(Exchange):
    def __init__(self, symbol_name):
        self.symbol_name = symbol_name

    def calc_ma(self, timespan):
        timespan_name = "ma_" + str(timespan)
        Exchange.symbols[self.symbol_name].history.loc[
            Exchange.symbols[self.symbol_name].history.index[-1],
            timespan_name] \
            = Exchange.symbols[self.symbol_name].history["close"].tail(timespan).mean()


if __name__ == "__main__":
    bnc_exchange = Exchange()
    bnc_exchange.symbols["axsbusd"] = Symbol("axs", "busd")
    bnc_websocket = Websocket("axsbusd")
    bnc_indicators = Indicators("axsbusd")
    bnc_exchange.symbols["axsbusd"].history = pd.DataFrame({
        "event_time": [101, 102, 103, 104, 105],
        "close": [50, 51, 56, 54, 53],
    })
    bnc_websocket.handle_symbol_ticker({
        "event_time": 106,
        "close": 54
    })
    bnc_indicators.calc_ma(3)
    print(bnc_exchange.symbols["axsbusd"].history)
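For what it's worth, here is a minimal sketch of how both points could be handled (the simplified class shapes are my assumptions, not the original code). Binding the Symbol object once gives a stable shorthand, since only its history attribute is ever rebound, and the last row is addressed by label via df.index[-1] or by position via df.iloc[-1]:

import pandas as pd

class Symbol:
    def __init__(self, name):
        self.symbol_name = name
        self.history = pd.DataFrame()

class Exchange:
    symbols = {}

class Websocket:
    def __init__(self, symbol_name):
        # Bind the Symbol object once; the instance itself is never replaced,
        # only its `history` attribute is rebound, so the reference stays valid.
        self.sym = Exchange.symbols[symbol_name]

    def handle_symbol_ticker(self, msg: dict):
        self.sym.history = pd.concat([
            self.sym.history,
            pd.DataFrame([msg]).set_index("event_time"),
        ])

if __name__ == "__main__":
    Exchange.symbols["axsbusd"] = Symbol("axsbusd")
    ws = Websocket("axsbusd")
    ws.handle_symbol_ticker({"event_time": 106, "close": 54})
    # .loc[-1, col] fails because -1 is treated as a *label*, not a position;
    # df.index[-1] (or df.iloc[-1]) addresses the last row instead.
    print(Exchange.symbols["axsbusd"].history.index[-1])  # -> 106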
I have an app that contains these models:
class Transaction(models.Model):
    chp_reference = models.CharField(max_length=50, unique=True)
    rent_effective_date = ..
    income_period = ..
    property_market_rent = ..
    number_of_family_group = ..
    cruser = ..
    prop_id = ..
    state = ..
    group = ..

class FamilyGroup(models.Model):
    name = models.CharField(..
    transaction = models.ForeignKey(Transaction, ..
    ...

class FamilyMember(models.Model):
    transaction = models.ForeignKey(Transaction, ..
    family_group = models.ForeignKey(FamilyGroup, ..
    name = models.CharField..
    date_of_birth = models.DateField..
    ....
I'm trying to make an Imports app that will accept xlsx files in a certain format.
I imported the models from the other apps and created a model that has a field for each field in the above models (I removed a lot so it looks readable).
I'm trying to use update_or_create, since I think it's the best approach; maybe in the future I'll want to update some fields. I have created the first update_or_create for Transaction, but since FamilyGroup and FamilyMember are children of Transaction (and inlines), I can't figure out how to apply this. The main idea is that one transaction contains family groups and family members.
class Batch(models.Model):
    batch = models.CharField(max_length=50)
    transaction_chp_reference = models.CharField(unique=True)
    transaction_rent_effective_date = models.DateField(..
    transaction_property_market_rent = models.DecimalField(..
    transaction_number_of_family_group = models.PositiveSmallIntegerField(..
    family_group_name = models.CharField(..
    family_group_family_type = models.CharField(..
    family_group_alloc_id = models.PositiveIntegerField(..
    family_group_last_rent = models.DecimalField(..
    family_member_name = models.CharField(..
    family_member_contact_id = models.PositiveIntegerField(..
    family_member_surname = models.CharField(..
    family_member_partnered = models.BooleanField(..

    def __str__(self):
        return str(self.batch)

    def save(self, *args, **kwargs):
        self.message = ''
        if self.transaction_chp_reference:
            trans, t = Transaction.objects.update_or_create(
                # filter on the unique value of `chp_reference`
                chp_reference=self.transaction_chp_reference,
                # update these fields, or create a new object with these values
                defaults={
                    'income_period': self.transaction_income_period,
                    'property_market_rent': self.transaction_property_market_rent,
                    'number_of_family_group': self.transaction_number_of_family_group,
                    'rent_effective_date': self.transaction_rent_effective_date,
                    'cruser': self.transaction_cruser,
                    'prop_id': self.transaction_prop_id,
                    'state': self.transaction_state,
                }
            )
            self.message += 'Transaction "' + str(trans.chp_reference) + '" Created\n'
            obj, mt = MaintenanceType.objects.update_or_create(
                name=self.family_group_maintenance_type,
            )
            obj, ft = FamilySituation.objects.update_or_create(
                name=self.family_group_family_type,
            )
            obj, fg = FamilyGroup.objects.update_or_create(
                transaction=t,
                name=self.family_group_name,
                defaults={
                    'alloc_id': self.family_group_alloc_id,
                    'any_income_support_payment': self.family_group_any_income_support_payment,
                    'cra_amount': self.family_group_cra_amount,
                    'cra_eligibilty': self.family_group_cra_eligibilty,
                    'family_type': ft,
                    'ftb_a': self.family_group_ftb_a,
                    'ftb_b': self.family_group_ftb_b,
                    'last_rent': self.family_group_last_rent,
                    'maintenance_amount': self.family_group_maintenance_amount,
                    'maintenance_type': mt,
                    'name': self.family_group_name,
                    'number_of_additional_children': self.family_group_number_of_additional_children,
                }
            )
            self.message += 'Family Group "' + str(obj.name) + '" Created\n'
Now I'm getting an error when I try to import an xlsx file:
Cannot assign "False": "FamilyGroup.transaction" must be a "Transaction" instance.
Traceback:
Traceback (most recent call last):
  File "E:\15-12\venv\lib\site-packages\django\db\models\query.py", line 575, in update_or_create
    obj = self.select_for_update().get(**kwargs)
  File "E:\15-12\venv\lib\site-packages\django\db\models\query.py", line 417, in get
    self.model._meta.object_name
calculator.models.FamilyGroup.DoesNotExist: FamilyGroup matching query does not exist.
UPDATE
I have replaced the save() method with this code.
from django.db.models.signals import post_save
from django.dispatch import receiver

@receiver(post_save, sender=Batch)
def post_save_transaction(sender, instance, created, **kwargs):
    message = ''
    if created:
        Transaction.objects.update_or_create(
            chp_reference=instance.transaction_chp_reference,
            defaults={'rent_effective_date': instance.rent_effective_date, ... , ...})
        ## now since FamilyGroup is a child (ForeignKey) of Transaction
        ## I'm not sure how to make it an instance of Transaction
        ## FamilyMember is also a child of FamilyGroup and Transaction - same issue
        ## I tried this --->
        transactions = []
        transaction = Transaction.objects.all()
        for i in transaction:
            transactions.append(i.pk)
        FamilyGroup.objects.update_or_create(name=instance.family_group_name,
                                             transaction__in=transactions
                                             )
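For reference, the 'Cannot assign "False"' error comes from the unpacking order: update_or_create() returns (object, created_boolean), so trans, t = ... puts the boolean in t, which is then passed as FamilyGroup.transaction. A minimal sketch of the pattern inside Batch.save() that avoids this (defaults trimmed to fields visible above):

# update_or_create returns (object, created_bool): keep the object and
# pass *it* to the child lookups, never the boolean.
transaction, _created = Transaction.objects.update_or_create(
    chp_reference=self.transaction_chp_reference,
    defaults={'rent_effective_date': self.transaction_rent_effective_date},
)
family_group, _ = FamilyGroup.objects.update_or_create(
    transaction=transaction,  # a real Transaction instance, not False
    name=self.family_group_name,
    defaults={'last_rent': self.family_group_last_rent},
)
FamilyMember.objects.update_or_create(
    transaction=transaction,
    family_group=family_group,
    name=self.family_member_name,
    defaults={'surname': self.family_member_surname},
)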
I am trying to parse all plugin tags from an XML file and build a dict using a Plugin class I created. I really do not understand the error or its context.
.append() does not seem to work when I try to initialize plugin_dict as a list. Also, when I try to call the __init__() function like plugin = Plugin(), I get an error that Plugin needs at least 1 argument, even though I have already implemented an __init__(self) function which requires no arguments.
Here is the code from main.py
from plugin import Plugin
from bs4 import BeautifulSoup


def parse():
    file = open('../data/windows.xml')
    soup = BeautifulSoup(file, 'xml')
    plugins = soup.find_all('plugin')
    plugin_dict = []
    for plugin in plugins:
        plugin_dict.append(Plugin(plugin))
    return plugin_dict
where plugin.py is:
class Plugin(object):
    __name = 'Unknown'

    __vendor = {}
    __vendor_name = 'Unknown, Inc.'
    __vendor_producturl = 'http://www.example.com/product'
    __vendor = [__vendor_name, __vendor_producturl]

    __version = {}
    __version_code = '0.0.0.0'
    __version_release_date = '01-01-1970'
    __version = [__version_code, __version_release_date]

    __properties = {}
    __beta = False
    __vst3 = False
    __x64 = False
    __category = 'synth/effect'
    __properties = {__beta, __vst3, __x64, __category}

    __magnet_uri = {}
    __iss_links = {}
    __download_uri = {}
    __downloads = {}
    __downloads = [__magnet_uri, __iss_links, __download_uri]

    def __init__(self):
        '''Create an empty instance for later use'''
        self.name = str(__name)
        self.version = {}
        self.version.code = str(__version[0])
        self.version.releasedate = str(__version[1])
        self.version = [self.version.code, self.version.releasedate]
        self.vendor = {}
        self.vendor.name = str(__vendor_name)
        self.vendor.producturl = str(__vendor_producturl)
        self.vendor = [self.vendor.name, self.vendor.producturl]
        self.properties = {}
        self.properties.beta = bool(__properties[0])
        self.properties.vst3 = bool(__properties[1])
        self.properties.x64 = bool(__properties[2])
        self.properties.category = str(__properties[3])
        self.properties = [self.properties.beta, self.properties.vst3, self.properties.x64, self.properties.category]
        self.magneturi = list(__magnet_uri)
        self.isslinks = list(__iss_links)
        self.downloaduri = list(__download_uri)
        self.downloads = {}
        self.downloads = [self.magneturi, self.isslinks, self.downloaduri]
        self.product_dict = {}
        self.product_dict = [self.name, self.vendor, self.properties, self.downloads]
        return self.product_dict

    def __init__(self, plugin):
        self.name = plugin['name']
        self.version = {}
        self.version.code = plugin.version.code.string
        self.version.releasedate = plugin.version.releasedate.string
        self.version = [self.version.code, self.version.releasedate]
        self.vendor = {}
        self.vendor.name = plugin.vendor.string
        self.vendor.producturl = plugin.vendor['url']
        self.vendor = [self.vendor.name, self.vendor.producturl]
        self.properties = {}
        self.properties.beta = plugin['beta']
        self.properties.vst3 = plugin['vst3']
        self.properties.x64 = plugin['x64']
        self.properties.category = plugin['category']
        self.properties = [self.properties.beta, self.properties.vst3, self.properties.x64, self.properties.category]
        magnet_uri_dict = {}
        magnet_uri_dict = plugin.find_all('magnet')
        for magnet_uri in magnet_uri_dict:
            self.magneturi.append[magnet_uri.name, magnet_uri.string]
        iss_link_dict = {}
        iss_link_dict = plugin.find_all('iss/*')
        for iss_link in iss_link_dict:
            self.isslinks.append(iss_link.string)
        download_uri_dict = {}
        download_uri_dict = plugin.find_all('download-uri/*')
        for download_uri in download_uri_dict:
            self.downloaduri.append(download_uri.string)
        self.downloads = {}
        self.downloads = [self.magneturi, self.isslinks, self.downloaduri]
        self.product_dict = {}
        self.product_dict = [self.name, self.vendor, self.properties, self.downloads]
        return self.product_dict
and a plugin tag in windows.xml looks like:
<plugin name="Serum" vst3="false" beta="false" x64="true" category="synth">
<vendor url="">Xfer Records</vendor>
<version>
<code>1.248</code>
<release-date>13-03-2019</release-date>
</version>
<download-uri>
<zippy>PLACEHOLDER</zippy>
<openload>PLACEHOLDER</openload>
<sr-files>PLACEHOLDER</sr-files>
</download-uri>
<iss>
<mirror1>PLACEHOLDER</mirror1>
<mirror2>PLACEHOLDER</mirror2>
<mirror3>PLACEHOLDER</mirror3>
</iss>
<magnet type="default"></magnet>
</plugin>
I think I am going wrong with defining self.version.something; I have never seen an example like that. I just used it for better classification of the XML into an object.
If you think I should tag this as python-3.x as well, please tell me.
You are trying to set an attribute on a dictionary:

self.version = {}
self.version.code = plugin.version.code.string  # This won't work

If you would like to set a key on the dictionary, you need to use the following syntax:

self.version['code'] = plugin.version.code.string

You have also redefined the __init__ method of your class; Python does not support this. In your code the second definition replaces the first, which is why Plugin() fails with a message about the missing required parameter "plugin".
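A minimal sketch of one way to support both call styles with a single constructor, using a default argument instead of overloading (the flattened attribute layout here is my simplification, not the original code):

class Plugin(object):
    def __init__(self, plugin=None):
        # With no argument, build an empty placeholder; with a parsed
        # <plugin> tag (a BeautifulSoup element), read values from it.
        if plugin is None:
            self.name = 'Unknown'
            self.version = {'code': '0.0.0.0', 'release_date': '01-01-1970'}
        else:
            self.name = plugin['name']
            self.version = {
                'code': plugin.version.code.string,
                # <release-date> contains a hyphen, so it needs find()
                'release_date': plugin.version.find('release-date').string,
            }

With this shape, both Plugin() and Plugin(tag) resolve to the same signature.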
Is it possible to call
tasks = models.Conference.objects.filter(location_id=key)
data = serializers.serialize("json", tasks)
and have it return the verbose field names rather than the variable names?
One way to accomplish this is by monkey patching the methods within the django.core.serializers.python.Serializer class to return each field's verbose_name as opposed to the standard name attribute.
Take for example the following code...
models.py
from django.db import models


class RelatedNode(models.Model):
    name = models.CharField(max_length=100, verbose_name="related node")


class Node(models.Model):
    name = models.CharField(max_length=100, verbose_name="verbose name")
    related_node = models.ForeignKey(RelatedNode, verbose_name="verbose fk related node", related_name="related_node")
    related_nodes = models.ManyToManyField(RelatedNode, verbose_name="verbose related m2m nodes", related_name="related_nodes")
I create these model objects within the database...
RelatedNode.objects.create(name='related_node_1')
RelatedNode.objects.create(name='related_node_2')
RelatedNode.objects.create(name='related_node_fk')
Node.objects.create(name='node_1', related_node=RelatedNode.objects.get(name='related_node_fk'))
Node.objects.all()[0].related_nodes.add(RelatedNode.objects.get(name='related_node_1'))
Node.objects.all()[0].related_nodes.add(RelatedNode.objects.get(name='related_node_2'))
views.py
from testing.models import Node
from django.utils.encoding import smart_text, is_protected_type
from django.core.serializers.python import Serializer
from django.core import serializers


def monkey_patch_handle_field(self, obj, field):
    value = field._get_val_from_obj(obj)
    # Protected types (i.e., primitives like None, numbers, dates,
    # and Decimals) are passed through as is. All other values are
    # converted to string first.
    if is_protected_type(value):
        self._current[field.verbose_name] = value
    else:
        self._current[field.verbose_name] = field.value_to_string(obj)


def monkey_patch_handle_fk_field(self, obj, field):
    if self.use_natural_foreign_keys and hasattr(field.rel.to, 'natural_key'):
        related = getattr(obj, field.name)
        if related:
            value = related.natural_key()
        else:
            value = None
    else:
        value = getattr(obj, field.get_attname())
    self._current[field.verbose_name] = value


def monkey_patch_handle_m2m_field(self, obj, field):
    if field.rel.through._meta.auto_created:
        if self.use_natural_foreign_keys and hasattr(field.rel.to, 'natural_key'):
            m2m_value = lambda value: value.natural_key()
        else:
            m2m_value = lambda value: smart_text(value._get_pk_val(), strings_only=True)
        self._current[field.verbose_name] = [m2m_value(related)
                                             for related in getattr(obj, field.name).iterator()]


Serializer.handle_field = monkey_patch_handle_field
Serializer.handle_fk_field = monkey_patch_handle_fk_field
Serializer.handle_m2m_field = monkey_patch_handle_m2m_field

serializers.serialize('json', Node.objects.all())
This outputs for me...
u'[{"fields": {"verbose fk related node": 3, "verbose related m2m nodes": [1, 2], "verbose name": "node_1"}, "model": "testing.node", "pk": 1}]'
As we can see, this actually gives us back the verbose_name of each field as the keys in the returned dictionaries.
This is very similar to another question that's over 3 years old: What's a good general way to look SQLAlchemy transactions, complete with authenticated user, etc?
I'm working on an application where I'd like to log all changes to particular tables. There's currently a really good "recipe" that does versioning, but I need to modify it to instead record a datetime when the change occurred and a user id of who made the change. I took the history_meta.py example that's packaged with SQLAlchemy and made it record times instead of version numbers, but I'm having trouble figuring out how to pass in a user id.
The question I referenced above suggests including the user id in the session object. That makes a lot of sense, but I'm not sure how to do that. I've tried something simple like session.userid = authenticated_userid(request) but in history_meta.py that attribute doesn't seem to be on the session object any more.
I'm doing all of this in the Pyramid framework and the session object that I'm using is defined as DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension())). In a view I do session = DBSession() and then proceed to use session. (I'm not really sure if that's necessary, but that's what's going on)
Here's my modified history_meta.py in case someone might find it useful:
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import mapper, class_mapper, attributes, object_mapper
from sqlalchemy.orm.exc import UnmappedClassError, UnmappedColumnError
from sqlalchemy import Table, Column, ForeignKeyConstraint, DateTime
from sqlalchemy import event
from sqlalchemy.orm.properties import RelationshipProperty
from datetime import datetime


def col_references_table(col, table):
    for fk in col.foreign_keys:
        if fk.references(table):
            return True
    return False


def _history_mapper(local_mapper):
    cls = local_mapper.class_

    # set the "active_history" flag
    # on column-mapped attributes so that the old version
    # of the info is always loaded (currently sets it on all attributes)
    for prop in local_mapper.iterate_properties:
        getattr(local_mapper.class_, prop.key).impl.active_history = True

    super_mapper = local_mapper.inherits
    super_history_mapper = getattr(cls, '__history_mapper__', None)

    polymorphic_on = None
    super_fks = []

    if not super_mapper or local_mapper.local_table is not super_mapper.local_table:
        cols = []
        for column in local_mapper.local_table.c:
            if column.name == 'version_datetime':
                continue

            col = column.copy()
            col.unique = False

            if super_mapper and col_references_table(column, super_mapper.local_table):
                super_fks.append((col.key, list(super_history_mapper.local_table.primary_key)[0]))

            cols.append(col)

            if column is local_mapper.polymorphic_on:
                polymorphic_on = col

        if super_mapper:
            super_fks.append(('version_datetime', super_history_mapper.base_mapper.local_table.c.version_datetime))
            cols.append(Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=True))
        else:
            cols.append(Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=True))

        if super_fks:
            cols.append(ForeignKeyConstraint(*zip(*super_fks)))

        table = Table(local_mapper.local_table.name + '_history', local_mapper.local_table.metadata,
                      *cols
                      )
    else:
        # single table inheritance. take any additional columns that may have
        # been added and add them to the history table.
        for column in local_mapper.local_table.c:
            if column.key not in super_history_mapper.local_table.c:
                col = column.copy()
                col.unique = False
                super_history_mapper.local_table.append_column(col)
        table = None

    if super_history_mapper:
        bases = (super_history_mapper.class_,)
    else:
        bases = local_mapper.base_mapper.class_.__bases__
    versioned_cls = type.__new__(type, "%sHistory" % cls.__name__, bases, {})

    m = mapper(
        versioned_cls,
        table,
        inherits=super_history_mapper,
        polymorphic_on=polymorphic_on,
        polymorphic_identity=local_mapper.polymorphic_identity
    )
    cls.__history_mapper__ = m

    if not super_history_mapper:
        local_mapper.local_table.append_column(
            Column('version_datetime', DateTime, default=datetime.now, nullable=False, primary_key=False)
        )
        local_mapper.add_property("version_datetime", local_mapper.local_table.c.version_datetime)


class Versioned(object):
    @declared_attr
    def __mapper_cls__(cls):
        def map(cls, *arg, **kw):
            mp = mapper(cls, *arg, **kw)
            _history_mapper(mp)
            return mp
        return map


def versioned_objects(iter):
    for obj in iter:
        if hasattr(obj, '__history_mapper__'):
            yield obj


def create_version(obj, session, deleted=False):
    obj_mapper = object_mapper(obj)
    history_mapper = obj.__history_mapper__
    history_cls = history_mapper.class_

    obj_state = attributes.instance_state(obj)

    attr = {}

    obj_changed = False

    for om, hm in zip(obj_mapper.iterate_to_root(), history_mapper.iterate_to_root()):
        if hm.single:
            continue

        for hist_col in hm.local_table.c:
            if hist_col.key == 'version_datetime':
                continue

            obj_col = om.local_table.c[hist_col.key]

            # get the value of the
            # attribute based on the MapperProperty related to the
            # mapped column. this will allow usage of MapperProperties
            # that have a different keyname than that of the mapped column.
            try:
                prop = obj_mapper.get_property_by_column(obj_col)
            except UnmappedColumnError:
                # in the case of single table inheritance, there may be
                # columns on the mapped table intended for the subclass only.
                # the "unmapped" status of the subclass column on the
                # base class is a feature of the declarative module as of sqla 0.5.2.
                continue

            # expired object attributes and also deferred cols might not be in the
            # dict. force it to load no matter what by using getattr().
            if prop.key not in obj_state.dict:
                getattr(obj, prop.key)

            a, u, d = attributes.get_history(obj, prop.key)

            if d:
                attr[hist_col.key] = d[0]
                obj_changed = True
            elif u:
                attr[hist_col.key] = u[0]
            else:
                # if the attribute had no value.
                attr[hist_col.key] = a[0]
                obj_changed = True

    if not obj_changed:
        # not changed, but we have relationships. OK
        # check those too
        for prop in obj_mapper.iterate_properties:
            if isinstance(prop, RelationshipProperty) and \
                    attributes.get_history(obj, prop.key).has_changes():
                obj_changed = True
                break

    if not obj_changed and not deleted:
        return

    attr['version_datetime'] = obj.version_datetime
    hist = history_cls()
    for key, value in attr.items():
        setattr(hist, key, value)
    session.add(hist)
    print(dir(session))
    obj.version_datetime = datetime.now()


def versioned_session(session):
    @event.listens_for(session, 'before_flush')
    def before_flush(session, flush_context, instances):
        for obj in versioned_objects(session.dirty):
            create_version(obj, session)
        for obj in versioned_objects(session.deleted):
            create_version(obj, session, deleted=True)
UPDATE:
Okay, it seems that in the before_flush() method the session I get is of type sqlalchemy.orm.session.Session where the session I attached the user_id to was sqlalchemy.orm.scoping.scoped_session. So, at some point an object layer is stripped off. Is it safe to assign the user_id to the Session within the scoped_session? Can I be sure that it won't be there for other requests?
Old question, but still very relevant.
You should avoid trying to place web session information on the database session. It's combining unrelated concerns, and each has its own lifecycle (which don't match). Here's an approach I use in Flask with SQLAlchemy (not Flask-SQLAlchemy, but that should work too). I've tried to comment where Pyramid would be different.
from flask import has_request_context  # How to check if in a Flask session
from sqlalchemy import inspect
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm.attributes import get_history
from sqlalchemy.event import listen

from YOUR_SESSION_MANAGER import get_user  # This would be something in Pyramid
from my_project import models  # Where your models are defined


def get_object_changes(obj):
    """ Given a model instance, returns dict of pending
    changes waiting for database flush/commit.

    e.g. {
        'some_field': {
            'before': *SOME-VALUE*,
            'after': *SOME-VALUE*
        },
        ...
    }
    """
    inspection = inspect(obj)
    changes = {}
    for attr in class_mapper(obj.__class__).column_attrs:
        if getattr(inspection.attrs, attr.key).history.has_changes():
            if get_history(obj, attr.key)[2]:
                before = get_history(obj, attr.key)[2].pop()
                after = getattr(obj, attr.key)
                if before != after:
                    if before or after:
                        changes[attr.key] = {'before': before, 'after': after}
    return changes


def my_model_change_listener(mapper, connection, target):
    changes = get_object_changes(target)
    changes.pop("modify_ts", None)  # remove fields you don't want to track

    user_id = None
    if has_request_context():
        # Call your function to get active user and extract id
        user_id = getattr(get_user(), 'id', None)

    if user_id is None:
        # What do you want to do if user can't be determined
        pass

    # You now have the model instance (target), the user_id who is logged in,
    # and a dictionary of changes.

    # Either do something "quick" with it here or call an async task (e.g.
    # Celery) to do something with the information that may take longer
    # than you want the request to take.


# Add the listener
listen(models.MyModel, 'after_update', my_model_change_listener)
After a bunch of fiddling I seem to be able to set values on the session object within the scoped_session by doing the following:
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
session = DBSession()
inner_session = session.registry()
inner_session.user_id = "test"
versioned_session(session)
Now the session object being passed around in history_meta.py has a user_id attribute on it which I set. I'm a little concerned about whether this is the right way of doing this as the object in the registry is a thread-local one and the threads are being re-used for different http requests.
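As an aside (my suggestion, not part of the original answers): SQLAlchemy sessions expose an info dictionary meant for per-session metadata like this, which avoids reaching into the scoped_session registry directly. A minimal sketch:

from sqlalchemy.orm import scoped_session, sessionmaker

DBSession = scoped_session(sessionmaker())

# At the start of a request (e.g. in a Pyramid tween or view):
session = DBSession()
session.info['user_id'] = "test"  # .info is a plain dict carried by the Session

# Inside history_meta.py, the plain Session passed to before_flush()
# carries the same dict:
def current_user_id(session):
    return session.info.get('user_id')

# Since the thread-local session is reused across requests, clear the key
# (or call DBSession.remove()) when the request ends.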
I ran into this old question recently. My requirement is to log all changes to a set of tables.
I'll post the code I ended up with here in case anyone finds it useful. It has some limitations, especially around deletes, but works for my purposes. The code supports logging audit records for selected tables to either a log file, or an audit table in the db.
from app import db
import datetime
from flask import current_app, g
# your own session user goes here
# you'll need an id and an email in that model
from flask_user import current_user as user
import importlib
import logging
from sqlalchemy import event, inspect
from sqlalchemy.orm.attributes import get_history
from sqlalchemy.orm import ColumnProperty, class_mapper
from uuid import uuid4


class AuditManager(object):
    config = {'storage': 'log',
              # define class for Audit model for your project, if saving audit records in db
              'auditModel': 'app.models.user_models.Audit'}

    def __init__(self, app):
        if 'AUDIT_CONFIG' in app.config:
            app.before_request(self.before_request_handler)
            self.config.update(app.config['AUDIT_CONFIG'])
            event.listen(
                db.session,
                'after_flush',
                self.db_after_flush
            )
            event.listen(
                db.session,
                'before_flush',
                self.db_before_flush
            )
            event.listen(
                db.session,
                'after_bulk_delete',
                self.db_after_bulk_delete
            )
            if self.config['storage'] == 'log':
                self.logger = logging.getLogger(__name__)
            elif self.config['storage'] == 'db':
                # Load Audit model class at runtime, so that log file users don't need to define it
                module_name, class_name = self.config['auditModel'].rsplit(".", 1)
                self.AuditModel = getattr(importlib.import_module(module_name), class_name)

    # Create a global request id
    # Use this to group transactions together
    def before_request_handler(self):
        g.request_id = uuid4()

    def db_after_flush(self, session, flush_context):
        for instance in session.new:
            if instance.__tablename__ in self.config['tables']:
                # Record the inserts for this table
                data = {}
                auditFields = getattr(instance.__class__, 'Meta', None)
                auditFields = getattr(auditFields,
                                      'auditFields',  # Prefer to list auditable fields explicitly in the model's Meta class
                                      self.get_fields(instance))  # or derive them otherwise
                for attr in auditFields:
                    data[attr] = str(getattr(instance, attr, 'not set'))  # Make every value a string in audit
                self.log_it(session, 'insert', instance, data)

    def db_before_flush(self, session, flush_context, instances):
        for instance in session.dirty:
            # Record the changes for this table
            if instance.__tablename__ in self.config['tables']:
                inspection = inspect(instance)
                data = {}
                auditFields = getattr(instance.__class__, 'Meta', None)
                auditFields = getattr(auditFields,
                                      'auditFields',
                                      self.get_fields(instance))
                for attr in auditFields:
                    if getattr(inspection.attrs, attr).history.has_changes():  # We only log the new data
                        data[attr] = str(getattr(instance, attr, 'not set'))
                self.log_it(session, 'change', instance, data)
        for instance in session.deleted:
            # Record the deletes for this table
            # for this to be triggered, you must use this session based delete object construct.
            # Eg: session.delete({query}.first())
            if instance.__tablename__ in self.config['tables']:
                data = {}
                auditFields = getattr(instance.__class__, 'Meta', None)
                auditFields = getattr(auditFields,
                                      'auditFields',
                                      self.get_fields(instance))
                for attr in auditFields:
                    data[attr] = str(getattr(instance, attr, 'not set'))
                self.log_it(session, 'delete', instance, data)

    def db_after_bulk_delete(self, delete_context):
        instance = delete_context.query.column_descriptions[0]['type']  # only works for single table deletes
        if delete_context.result.returns_rows:
            # Not sure exactly how after_bulk_delete is expected to work, since the context.results is empty,
            # as delete statements return no results
            for row in delete_context.result:
                data = {}
                auditFields = getattr(instance.__class__, 'Meta', None)
                auditFields = getattr(auditFields,
                                      'auditFields',
                                      self.get_fields(instance))
                for attr in auditFields:
                    data[attr] = str(getattr(row, attr, 'not set'))  # Make every value a string in audit
                self.log_it(delete_context.session, 'delete', instance, data)
        else:
            # Audit what we can when we don't have individual rows to look at
            self.log_it(delete_context.session, 'delete', instance,
                        {"rowcount": delete_context.result.rowcount})

    def log_it(self, session, action, instance, data):
        if self.config['storage'] == 'log':
            self.logger.info("request_id: %s, table: %s, action: %s, user id: %s, user email: %s, date: %s, data: %s"
                             % (getattr(g, 'request_id', None), instance.__tablename__, action,
                                getattr(user, 'id', None), getattr(user, 'email', None),
                                datetime.datetime.now(), data))
        elif self.config['storage'] == 'db':
            audit = self.AuditModel(request_id=str(getattr(g, 'request_id', None)),
                                    table=str(instance.__tablename__),
                                    action=action,
                                    user_id=getattr(user, 'id', None),
                                    user_email=getattr(user, 'email', None),
                                    date=datetime.datetime.now(),
                                    data=data
                                    )
            session.add(audit)

    def get_fields(self, instance):
        fields = []
        for attr in class_mapper(instance.__class__).column_attrs:
            fields.append(attr.key)
        return fields
Suggested Model, if you want to store audit records in the database.
class Audit(db.Model):
    __tablename__ = 'audit'
    id = db.Column(db.Integer, primary_key=True)
    request_id = db.Column(db.Unicode(50), nullable=True, index=True, server_default=u'')
    table = db.Column(db.Unicode(50), nullable=False, index=True, server_default=u'')
    action = db.Column(db.Unicode(20), nullable=False, server_default=u'')
    user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete='SET NULL'), nullable=True)
    user_email = db.Column(db.Unicode(255), nullable=False, server_default=u'')
    date = db.Column(db.DateTime, default=db.func.now())
    data = db.Column(JSON)
In settings:
AUDIT_CONFIG = {
    "tables": ['user', 'order', 'batch']
}
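And a minimal sketch of wiring the manager up in an app factory (the import path and factory shape are my assumptions):

from flask import Flask
from app.audit import AuditManager  # wherever the class above lives

def create_app():
    app = Flask(__name__)
    app.config['AUDIT_CONFIG'] = {
        'storage': 'log',  # or 'db' plus an 'auditModel' entry
        'tables': ['user', 'order', 'batch'],
    }
    AuditManager(app)  # registers the before_request hook and session listeners
    return app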