I am using Django and PostgreSQL (psycopg2) for one of our web REST API projects.
The app is driven by the django-rest-framework library for all REST-centric tasks such as authentication, permissions, serialization, and API views. However, since our database tables are not created through Django's migration system (they are created manually and directly through DBeaver), our modeling and serialization are highly customized and no longer follow Django's ORM conventions, although we try to keep our custom design as close to Django's as possible so that the pattern still feels familiar.
CRUD actions are communicated to and from the database through one custom manager class mapped to each custom model class it is supposed to manage. Each manager has methods such as get(), insert(), update(), delete(), and force_delete() that contain the logic for actually sending queries to the database.
All methods responsible for fetching data, i.e. get(), all(), and filter(), query a database view instead of hitting the underlying table directly, so that JOINs that might be too expensive for the DB server are handled inside the view definition (a sketch of such a view follows below).
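For context, such a read view might look roughly like the following; the view name, the joined table, and its column are purely hypothetical, since the actual view definitions are not included here.

-- Hypothetical example only: a read-optimized view the managers' SELECTs
-- could target, so any expensive JOINs live in the view definition
-- (created manually, e.g. via DBeaver) rather than in application code.
-- sample_schema.api_version_view and api_version_notes are made-up names.
CREATE VIEW sample_schema.api_version_view AS
SELECT av.version,
       av.version_info,
       av.date_released,
       av.development,
       av.production,
       n.release_notes          -- column from the hypothetical joined table
FROM sample_schema.api_version AS av
LEFT JOIN sample_schema.api_version_notes AS n
       ON n.version = av.version;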
This design works fine for us, but I still ask myself whether it is actually ideal, or at least an acceptable approach, especially under real-world, daily consumption of our API by potentially millions of clients.
Is there any best practice I should strictly follow, or a better approach anyone can suggest?
Here are the sample classes for one of our API resources - API version:
DB table
Model Class
class ApiVersionModel:
    objects = ApiVersionManager()

    def __init__(self):
        self.version = None
        self.version_info = None
        self.date_released = None
        self.development = None
        self.production = None

    def save(self, initial=False):
        if not initial:
            self.objects.update(self)
        else:
            self.objects.insert(self)

    def delete(self, force_delete=False):
        if force_delete:
            self.objects.delete(self)
        else:
            self.objects.soft_delete(self)
Serializer Class
class ApiVersionSerializer(serializers.Serializer):
    version = serializers.CharField(max_length=15)
    version_info = serializers.CharField(max_length=255, required=False, allow_null=True)
    date_released = serializers.DateField()
    development = serializers.BooleanField(default=True, required=False, allow_null=True)
    production = serializers.BooleanField(default=False, required=False, allow_null=True)

    def create(self, validated_data):
        c = ApiVersionModel()
        c.version = validated_data.get("version")
        c.version_info = validated_data.get("version_info")
        c.date_released = validated_data.get("date_released")
        c.development = validated_data.get("development")
        c.production = validated_data.get("production")
        c.save(initial=True)
        return c

    def update(self, c, validated_data):
        c.version = validated_data.get("version")
        c.version_info = validated_data.get("version_info")
        c.date_released = validated_data.get("date_released")
        c.development = validated_data.get("development")
        c.production = validated_data.get("production")
        c.save()
        return c

    def delete(self, c, validated_data, force_delete=False):
        c.version = validated_data.get("version")
        c.version_info = validated_data.get("version_info")
        c.date_released = validated_data.get("date_released")
        c.development = validated_data.get("development")
        c.production = validated_data.get("production")
        c.delete(force_delete=force_delete)
        return c
Manager Class
import traceback
from config.utils import (raw_sql_select, raw_sql_select_enhanced, raw_sql_insert, raw_sql_update, raw_sql_delete)
from unit2_app.utils.ModelUtil import where


class ApiVersionManager():
    def __init__(self):
        pass

    @staticmethod
    def all(**kwargs):
        query = None
        x = None
        where_clause = where(**kwargs)
        query = ("""
            SELECT *
            FROM sample_schema.api_version {};
        """.format(where_clause))
        x = raw_sql_select_enhanced(query, "slave", list(kwargs.values()))
        if x is not None:
            objects = []
            from unit2_app.models.Sys import ApiVersionModel
            for col in x[1]:
                c = ApiVersionModel()
                c.version = col.version
                c.version_info = col.version_info
                c.date_released = col.date_released
                c.development = col.development
                c.production = col.production
                objects.append(c)
            return [] if len(objects) == 0 else objects
        return []

    @staticmethod
    def get(**kwargs):
        query = None
        x = None
        where_clause = where(**kwargs)
        query = ("""
            SELECT *
            FROM sample_schema.api_version {};
        """.format(where_clause))
        x = raw_sql_select_enhanced(query, "slave", list(kwargs.values()))
        if x is not None:
            objects = []
            from unit2_app.models.Sys import ApiVersionModel
            for col in x[1]:
                c = ApiVersionModel()
                c.version = col.version
                c.version_info = col.version_info
                c.date_released = col.date_released
                c.development = col.development
                c.production = col.production
                objects.append(c)
            return None if len(objects) == 0 else objects[0]
        return None

    @staticmethod
    def filter(**kwargs):
        query = None
        x = None
        where_clause = where(**kwargs)
        query = ("""
            SELECT *
            FROM sample_schema.api_version {};
        """.format(where_clause))
        x = raw_sql_select_enhanced(query, "slave", list(kwargs.values()))
        if x is not None:
            objects = []
            from unit2_app.models.Sys import ApiVersionModel
            for col in x[1]:
                c = ApiVersionModel()
                c.version = col.version
                c.version_info = col.version_info
                c.date_released = col.date_released
                c.development = col.development
                c.production = col.production
                objects.append(c)
            return [] if len(objects) == 0 else objects
        return []

    @staticmethod
    def insert(c):
        try:
            query = ("""
                START TRANSACTION;
                INSERT INTO sample_schema.api_version
                    (version, version_info, date_released, development, production)
                VALUES (%(version)s, %(version_info)s, %(date_released)s, %(development)s, %(production)s);
            """)
            raw_sql_insert(query, "master", c.__dict__)
        except Exception:
            traceback.print_exc()
            raise Exception("Unexpected manager exception has been encountered.")

    @staticmethod
    def update(c):
        try:
            query = ("""
                START TRANSACTION;
                UPDATE sample_schema.api_version SET
                    version_info = %(version_info)s,
                    date_released = %(date_released)s,
                    development = %(development)s,
                    production = %(production)s
                WHERE version = %(version)s;
            """)
            raw_sql_update(query, "master", c.__dict__)
        except Exception:
            raise Exception("Unexpected manager exception has been encountered.")

    @staticmethod
    def delete(c):
        try:
            print(c.__dict__)
            query = ("""
                START TRANSACTION;
                DELETE FROM sample_schema.api_version WHERE version=%(version)s;
            """)
            raw_sql_delete(query, "master", c.__dict__)
        except Exception:
            raise Exception("Something went wrong with the database manager.")

    @staticmethod
    def soft_delete(c):
        pass
API View Class
class APIView_ApiVersion(views.APIView):
    try:
        serializer_class = ApiVersionSerializer
        permission_classes = (IsAuthenticatedOrReadOnly,)
        authentication_classes = ()
    except:
        traceback.print_exc()

    def get_queryset(self, **fltr):
        return self.serializer_class(ApiVersionModel.objects.all(**fltr), many=True).data

    def get(self, request, **kwargs):
        try:
            fltr = {k: v[0] for k, v in dict(self.request.GET).items()}
            return_data = None
            url_path_param_version = None
            return_data = self.get_queryset(**fltr)
            # perform filtering for version if <version> path param
            # ... is present in the URL
            if request.resolver_match.kwargs and request.resolver_match.kwargs["version"]:
                url_path_param_version = request.resolver_match.kwargs["version"]
                return_data = ApiVersionModel.objects.get(version=url_path_param_version, **fltr)
            else:
                return_data = ApiVersionModel.objects.all(**fltr)
            if isinstance(return_data, list):
                if len(return_data) > 0:
                    return Response({
                        "success": True,
                        "message": "API version has been fetched successfully.",
                        "data": self.serializer_class(return_data, many=True).data
                    }, status=status.HTTP_200_OK)
                else:
                    return Response({
                        "success": True,
                        "message": HTTPNotFound.resource_empty(None, obj="API version"),
                        "data": []
                    }, status=status.HTTP_200_OK)
            else:
                if return_data:
                    return Response({
                        "success": True,
                        "message": "API version has been fetched successfully.",
                        "data": self.serializer_class(return_data).data
                    }, status=status.HTTP_200_OK)
                else:
                    return Response({
                        "success": False,
                        "message": HTTPNotFound.object_unknown(None, obj="API version")
                    }, status=HTTPNotFound.status_code)
        except Exception as e:
            return Response({
                "success": False,
                "message": str(HTTPServerError.unknown_error(None)) + " DETAIL: {}".format(str(e))
            }, status=HTTPServerError.status_code)

    # Other METHODS also go here i.e. post(), update(), etc.
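For completeness, a post() handler under this pattern would presumably hand request.data to the serializer and let its create() (shown above) drive the manager insert. The following is only a sketch of that flow inside APIView_ApiVersion, mirroring the response shape of the get() handler; it is not the project's actual implementation.

    # Sketch only: assumes ApiVersionSerializer.create() performs the insert
    # via ApiVersionModel.save(initial=True), as shown earlier.
    def post(self, request, **kwargs):
        try:
            serializer = self.serializer_class(data=request.data)
            if serializer.is_valid():
                obj = serializer.save()  # DRF routes this to create()
                return Response({
                    "success": True,
                    "message": "API version has been created successfully.",
                    "data": self.serializer_class(obj).data
                }, status=status.HTTP_201_CREATED)
            return Response({
                "success": False,
                "message": serializer.errors
            }, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            return Response({
                "success": False,
                "message": str(HTTPServerError.unknown_error(None)) + " DETAIL: {}".format(str(e))
            }, status=HTTPServerError.status_code)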
Custom utility for dynamic API resource filtering / dynamic WHERE CLAUSE
Since our ORM is highly customized, we cannot use DRF's built-in filtering classes, so I created my own simple utility to optionally allow filtering of SELECT queries via a query string. When applied, the value that the where() method generates gets injected into the DB query in our custom managers.
def filter(key, val):
    f = []
    c = ""
    operator = "="
    if len(key.split('__')) > 1:
        dus = key.split('__')
        for x in range(len(dus)):
            f.append(str('{}' if x == 0 else "'{}'").format(dus[x]))
    else:
        f.append(key)
        c = c.join(f) + " {} {} ".format(operator, str(val))
    if len(key.split('___')) > 1:
        tus = key.split('___')
        for x in range(len(tus)):
            if tus[x] == "lt":
                operator = "<"
            elif tus[x] == "lte":
                operator = "<="
            elif tus[x] == "gt":
                operator = ">"
            elif tus[x] == "gte":
                operator = ">="
            for y in f:
                if tus[x] in y:
                    f.remove(y)
                    y = ""
    if len(f) > 2:
        for x in range(len(f)):
            if x < len(f) - 2:
                y += f[x] + "->"
            elif x <= len(f) - 2:
                y += f[x] + "->>"
            elif x >= len(f) - 2:
                y += f[x]
    else:
        y += "->>".join(f)
    if val is not None:
        if isinstance(val, bool):
            c = y + " {} '{}' ".format(operator, str(val).lower())
        else:
            c = y + " {} '{}' ".format(operator, str(val))
    else:
        c = y + " IS NULL "
    return c


def where(**kwargs):
    fields = []
    if bool(kwargs):
        for key, val in kwargs.items():
            # fields.append(filter_nest(key, val))
            fields.append(filter(key, val))
    return ' WHERE ' + ' AND '.join(fields) if bool(kwargs) else ""
Related
I've run into a bit of a snag with this code and could use some help; I can't figure out what the issue is here.
File "C:\Users\Joshua\Desktop\sasquatch_sightings\flask_app\controllers\users.py", line 34, in user_dashboard
return render_template("dashboard.html",user=User.get_user_by_id(data),sightings=Sighting.get_all())
File "C:\Users\Joshua\Desktop\sasquatch_sightings\flask_app\models\user.py", line 44, in get_user_by_id
return cls(results[0])
IndexError: tuple index out of range
My Code
Users.py
from flask_app import app
from flask import render_template, redirect, request, session, flash
from flask_app.models.user import User
from flask_app.models.sighting import Sighting
from flask_bcrypt import Bcrypt

bcrypt = Bcrypt(app)


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/user/create', methods={'POST'})
def create_user():
    if not User.validate_user_reg_data(request.form):
        return redirect('/')
    data = {
        "first_name": request.form['first_name'],
        "last_name": request.form['last_name'],
        "email": request.form['email'],
        "password": bcrypt.generate_password_hash(request.form['password'])
    }
    id = User.create_user(data)
    session['user_id'] = id
    return redirect('/user/dashboard')


@app.route('/user/dashboard')
def user_dashboard():
    if 'user_id' not in session:
        return redirect('/logout')
    data = {
        'id': session['user_id']
    }
    return render_template("dashboard.html", user=User.get_user_by_id(data), sightings=Sighting.get_all())


@app.route('/user/login', methods=['POST'])
def login():
    user = User.get_user_by_email(request.form)
    if not user:
        flash("Invalid Email, login")
        return redirect('/')
    session['user_id'] = user.id
    return redirect('/user/dashboard')


@app.route('/user/logout')
def logout():
    session.clear()
    return redirect('/')
Second file: user.py
from flask_app.config.mysqlconnection import MySQLConnection, connectToMySQL
from flask import flash, session
import re

EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')


class User:
    db = "sasquatch"

    def __init__(self, data):
        self.id = data['id']
        self.first_name = data['first_name']
        self.last_name = data['last_name']
        self.email = data['email']
        self.password = data['password']
        self.created_at = data['created_at']
        self.updated_at = data['updated_at']

    @classmethod
    def create_user(cls, data):
        data = cls.parse_registration_data(data)
        query = "INSERT INTO users (first_name, last_name, email, password) VALUES (%(first_name)s, %(last_name)s, %(email)s, %(password)s);"
        return connectToMySQL(cls.db).query_db(query, data)

    @classmethod
    def get_all(cls):
        query = "SELECT * FROM users;"
        results = connectToMySQL(cls.db).query_db(query)
        users = []
        for row in results:
            users.append(cls(row))
        return users

    @classmethod
    def get_user_by_email(cls, data):
        query = "SELECT * FROM users WHERE email = %(email)s;"
        results = connectToMySQL(cls.db).query_db(query, data)
        if len(results) < 1:
            return False
        return cls(results[0])

    @classmethod
    def get_user_by_id(cls, data):
        query = "SELECT * FROM users WHERE id = %(id)s;"
        results = connectToMySQL(cls.db).query_db(query, data)
        return cls(results[0])

    @staticmethod
    def validate_user_reg_data(user):
        is_valid = True
        query = "SELECT * FROM users WHERE email = %(email)s;"
        results = connectToMySQL(User.db).query_db(query, user)
        if len(results) >= 1:
            flash("Email already taken.", "register")
            is_valid = False
        if not EMAIL_REGEX.match(user['email']):
            flash("Invalid Email", "register")
            is_valid = False
        if len(user['first_name']) < 2:
            flash("First name must be more than 2 characters", "register")
            is_valid = False
        if len(user['last_name']) < 2:
            flash("Last name must be more than 2 characters", "register")
        if len(user['password']) < 8:
            flash("Password must be more than 8 characters", "register")
            is_valid = False
        if user['password'] != user['confirm']:
            flash("Passwords do not match", "register")
        return is_valid

    @staticmethod
    def parse_registration_data(data):
        parsed_data = {}
        parsed_data['email'] = data['email'].lower()
        parsed_data['first_name'] = data['first_name']
        parsed_data['last_name'] = data['last_name']
        return parsed_data
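The traceback above ends in get_user_by_id() calling cls(results[0]) on an empty result set. Purely as an illustration, the same empty-result guard that get_user_by_email() already uses could be applied there too; this is a sketch, not necessarily the root cause (a stale user_id left in the session would produce the same symptom).

    # Sketch only: mirrors the guard in get_user_by_email() so an empty result
    # returns False instead of raising IndexError on results[0].
    @classmethod
    def get_user_by_id(cls, data):
        query = "SELECT * FROM users WHERE id = %(id)s;"
        results = connectToMySQL(cls.db).query_db(query, data)
        if len(results) < 1:
            return False
        return cls(results[0])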
I recently stumbled upon this very useful code from https://github.com/Douglas6/cputemp.
I've modified the code for clarity and so that the "application" class is imported as an external class. I'm trying to understand why "set_temperature_callback" is called indefinitely in a loop. The interval of the loop is defined by NOTIFY_TIMEOUT, which is set here to 1000 ms. I'm using this code as a peripheral for one of my Unity projects and it's working fine. I'd really appreciate any help!
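For background on the looping itself: in the cputemp example, add_timeout presumably wraps GLib's timeout_add, which keeps calling the supplied callback at the given interval for as long as the callback returns True and removes it once it returns a falsy value. That would explain why set_temperature_callback repeats every NOTIFY_TIMEOUT milliseconds while notifying is True and stops after StopNotify() sets it to False. Below is a minimal, standalone sketch of that GLib behaviour, independent of BlueZ; the tick() function is made up for the example.

# Minimal sketch of GLib.timeout_add semantics (assumed to be what add_timeout
# uses internally): the callback is re-invoked at the given interval while it
# returns True and is cancelled as soon as it returns a falsy value.
from gi.repository import GLib

state = {"count": 0}

def tick():
    state["count"] += 1
    print("tick", state["count"])
    return state["count"] < 5  # returning False removes the timeout

GLib.timeout_add(1000, tick)  # interval in milliseconds
GLib.MainLoop().run()         # tick() fires five times, then the loop just keeps running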
import dbus

from advertisement import Advertisement
from service import Service, Characteristic, Descriptor
from gpiozero import CPUTemperature

#testing code
#------------------------------------------#
from application import Application
import inspect as i
import sys


def FindSource(method):
    sys.stdout.write(i.getsource(method))
#------------------------------------------#

GATT_CHRC_IFACE = "org.bluez.GattCharacteristic1"
# the interval between sending messages, is counted in milliseconds, was 5000
NOTIFY_TIMEOUT = 1000


class ThermometerAdvertisement(Advertisement):
    def __init__(self, index):
        Advertisement.__init__(self, index, "peripheral")
        self.add_local_name("Raspberry (thermometer)")
        self.include_tx_power = True


class ThermometerService(Service):
    # the service we're interested and is displayed in unity
    THERMOMETER_SVC_UUID = "00000001-710e-4a5b-8d75-3e5b444bc3cf"

    def __init__(self, index):
        self.farenheit = True
        Service.__init__(self, index, self.THERMOMETER_SVC_UUID, True)
        self.add_characteristic(TempCharacteristic(self))
        self.add_characteristic(UnitCharacteristic(self))

    def is_farenheit(self):
        return self.farenheit

    def set_farenheit(self, farenheit):
        self.farenheit = farenheit


class TempCharacteristic(Characteristic):
    TEMP_CHARACTERISTIC_UUID = "00000002-710e-4a5b-8d75-3e5b444bc3cf"

    def __init__(self, service):
        self.notifying = False
        Characteristic.__init__(
            self, self.TEMP_CHARACTERISTIC_UUID,
            ["notify", "read"], service)
        self.add_descriptor(TempDescriptor(self))

    def get_temperature(self):
        value = []
        unit = "C"
        # code here repeats
        cpu = CPUTemperature()
        temp = cpu.temperature
        if self.service.is_farenheit():
            temp = (temp * 1.8) + 32
            unit = "F"
        strtemp = str(round(temp, 1)) + " " + unit
        for c in strtemp:
            value.append(dbus.Byte(c.encode()))
        return value

    # the entire method will run on a loop when unity app hits "subscribe"
    def set_temperature_callback(self):
        if self.notifying:
            print("running inside set_temperature_callback")
            value = self.get_temperature()
            self.PropertiesChanged(GATT_CHRC_IFACE, {"Value": value}, [])
        return self.notifying

    def StartNotify(self):
        print("Start notify first time")
        if self.notifying:
            print("returning from start notify")
            return
        self.notifying = True
        value = self.get_temperature()
        self.PropertiesChanged(GATT_CHRC_IFACE, {"Value": value}, [])
        self.add_timeout(NOTIFY_TIMEOUT, self.set_temperature_callback)

    def StopNotify(self):
        self.notifying = False

    def ReadValue(self, options):
        value = self.get_temperature()
        return value


class TempDescriptor(Descriptor):
    TEMP_DESCRIPTOR_UUID = "2901"
    TEMP_DESCRIPTOR_VALUE = "CPU Temperature test"

    def __init__(self, characteristic):
        Descriptor.__init__(
            self, self.TEMP_DESCRIPTOR_UUID,
            ["read"],
            characteristic)

    def ReadValue(self, options):
        value = []
        desc = self.TEMP_DESCRIPTOR_VALUE
        for c in desc:
            value.append(dbus.Byte(c.encode()))
        return value


class UnitCharacteristic(Characteristic):
    UNIT_CHARACTERISTIC_UUID = "00000003-710e-4a5b-8d75-3e5b444bc3cf"

    def __init__(self, service):
        Characteristic.__init__(
            self, self.UNIT_CHARACTERISTIC_UUID,
            ["read", "write"], service)
        self.add_descriptor(UnitDescriptor(self))

    def WriteValue(self, value, options):
        val = str(value[0]).upper()
        if val == "C":
            self.service.set_farenheit(False)
        elif val == "F":
            self.service.set_farenheit(True)

    def ReadValue(self, options):
        value = []
        if self.service.is_farenheit(): val = "F"
        else: val = "C"
        value.append(dbus.Byte(val.encode()))
        return value


class UnitDescriptor(Descriptor):
    UNIT_DESCRIPTOR_UUID = "2901"
    UNIT_DESCRIPTOR_VALUE = "Temperature Units (F or C)"

    def __init__(self, characteristic):
        Descriptor.__init__(
            self, self.UNIT_DESCRIPTOR_UUID,
            ["read"],
            characteristic)

    def ReadValue(self, options):
        value = []
        desc = self.UNIT_DESCRIPTOR_VALUE
        for c in desc:
            value.append(dbus.Byte(c.encode()))
        return value


app = Application()
app.add_service(ThermometerService(0))
app.register()

adv = ThermometerAdvertisement(0)
adv.register()
Is there a sure-fire way to check that the class of an object is a subclass of the desired superclass?
For example, in a migration script that I'm writing, I have to convert objects of a given type to dictionaries in a given manner to ensure two-way compatibility of the data.
This is best summed up by the following class hierarchy:
Serializable
    User
    Status
    Issue
        Test
        Set
    Step
    Cycle
However, when I'm recursively checking objects after depickling, I receive a Test object that yields the following results:
Testing data object type:
type(data)
{type} <class '__main__.Test'>

Testing Class type:
type(Test())
{type} <class '__main__.Test'>

Testing object type against class type:
type(Test()) == type(data)
{bool} False

Testing if object isinstance() of Class:
isinstance(data, Test)
{bool} False

Testing if Class isinstance() of Super Class:
isinstance(Test(), Serializable)
{bool} True

Testing isinstance() of Super Class:
isinstance(data, Serializable)
{bool} False
Interestingly, it doesn't appear to have any such problem prior to pickling; it handles creating the dictionary and integrity hash just fine.
This only crops up with depickled objects, in both Pickle and Dill.
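For what it's worth, isinstance() compares against the class object itself, so if the "same" class ends up defined twice (for example once as __main__.Test and once under the module's import name, which can happen when a script is both run directly and imported during unpickling), instances of one are not instances of the other. Below is a minimal, self-contained illustration of that general behaviour, not a diagnosis of this particular script; the module names are made up.

# Illustration only: two class objects with the same name are still
# different classes, so isinstance() across them is False.
import types

mod_a = types.ModuleType("mod_a")
mod_b = types.ModuleType("mod_b")
exec("class Test(object): pass", mod_a.__dict__)
exec("class Test(object): pass", mod_b.__dict__)

obj = mod_a.Test()
print(isinstance(obj, mod_a.Test))   # True
print(isinstance(obj, mod_b.Test))   # False: same name, different class object
print(mod_a.Test is mod_b.Test)      # False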
For context, here's the code in its native environment, the DataCache object that is pickled:
class DataCache(object):
    _hash = ""
    _data = None

    @staticmethod
    def genHash(data):
        dataDict = DataCache.dictify(data)
        datahash = json.dumps(dataDict, sort_keys=True)
        return hashlib.sha256(datahash).digest()

    @staticmethod
    def dictify(data):
        if isinstance(data, list):
            datahash = []
            for item in data:
                datahash.append(DataCache.dictify(item))
        elif isinstance(data, (dict, collections.OrderedDict)):
            datahash = collections.OrderedDict()
            # iterate over the input mapping, not the freshly created (empty) one
            for key, value in data.iteritems():
                datahash[key] = DataCache.dictify(value)
        elif isinstance(data, Serializable):
            datahash = data.toDict()
        else:
            datahash = data
        return datahash

    def __init__(self, restoreDict={}):
        if restoreDict:
            self.__dict__.update(restoreDict)

    def __getinitargs__(self):
        return (self.__dict__)

    def set(self, data):
        self._hash = DataCache.genHash(data)
        self._data = data

    def verify(self):
        dataHash = DataCache.genHash(self._data)
        return (self._hash == dataHash)

    def get(self):
        return self._data
Finally, I know there are arguments for using JSON for readability in storage, but I needed Pickle's ability to convert straight to and from objects without my specifying the object type (thanks to the nesting, that's not really feasible).
Am I going mad here, or does pickling do something to the class definitions?
EDIT:
Minimal Implementation:
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import requests
from aenum import Enum
import json # _tricks
import base64
import argparse
import os
import sys
import datetime
import dill
import hashlib
import collections
class Serializable(object):
    def __init__(self, initDict={}):
        if initDict:
            self.__dict__.update(initDict)

    def __str__(self):
        return str(self.sortSelf())

    def sortSelf(self):
        return collections.OrderedDict(sorted(self.__dict__.items()))

    def toDict(self):
        return self.__dict__

    def fromDict(self, dict):
        # Not using __dict__.update(...) to avoid polluting objects with the excess data
        varMap = self.__dict__
        if dict and varMap:
            for key in varMap:
                if (key in dict):
                    varMap[key] = dict[key]
            self.__dict__.update(varMap)
            return self
        return None
class Issue(Serializable):
    def __init__(self, initDict={}):
        self.id = 0
        self.key = ""
        self.fields = {}
        if initDict:
            self.__dict__.update(initDict)
        Serializable.__init__(self)

    def fieldToDict(self, obj, key, type):
        if key in obj:
            result = obj[key]
        else:
            return None
        if result is None:
            return None
        if isinstance(result, type):
            return result.toDict()
        return result

    def fromDict(self, jsonDict):
        super(Issue, self).fromDict(jsonDict)
        self.fields["issuetype"] = IssueType().fromDict(self.fields["issuetype"])
        self.fields["assignee"] = User().fromDict(self.fields["assignee"])
        self.fields["creator"] = User().fromDict(self.fields["creator"])
        self.fields["reporter"] = User().fromDict(self.fields["reporter"])
        return self

    def toDict(self):
        result = super(Issue, self).toDict()
        blankKeys = []
        for fieldName, fieldValue in self.fields.iteritems():
            if fieldValue is None:
                blankKeys.append(fieldName)
        if blankKeys:
            for key in blankKeys:
                self.fields.pop(key, None)
        result["fields"]["issuetype"] = self.fieldToDict(result["fields"], "issuetype", IssueType)
        result["fields"]["creator"] = self.fieldToDict(result["fields"], "creator", User)
        result["fields"]["reporter"] = self.fieldToDict(result["fields"], "reporter", User)
        result["fields"]["assignee"] = self.fieldToDict(result["fields"], "assignee", User)
        return result
class IssueType(Serializable):
    def __init__(self):
        self.id = 0
        self.name = ""

    def toDict(self):
        return {"id": str(self.id)}
class Project(Serializable):
    def __init__(self):
        Serializable.__init__(self)
        self.id = 0
        self.name = ""
        self.key = ""
class Cycle(Serializable):
    def __init__(self):
        self.id = 0
        self.name = ""
        self.totalExecutions = 0
        self.endDate = ""
        self.description = ""
        self.totalExecuted = 0
        self.started = ""
        self.versionName = ""
        self.projectKey = ""
        self.versionId = 0
        self.environment = ""
        self.totalCycleExecutions = 0
        self.build = ""
        self.ended = ""
        self.name = ""
        self.modifiedBy = ""
        self.projectId = 0
        self.startDate = ""
        self.executionSummaries = {'executionSummary': []}
class Step(Serializable):
    def __init__(self):
        self.id = ""
        self.orderId = 0
        self.step = ""
        self.data = ""
        self.result = ""
        self.attachmentsMap = {}

    def toDict(self):
        dict = {}
        dict["step"] = self.step
        dict["data"] = self.data
        dict["result"] = self.result
        dict["attachments"] = []
        return dict
class Status(Serializable):
    def __init__(self):
        self.id = 0
        self.name = ""
        self.description = ""
        self.isFinal = True
        self.color = ""
        self.isNative = True
        self.statusCount = 0
        self.statusPercent = 0.0
class User(Serializable):
    def __init__(self):
        self.displayName = ""
        self.name = ""
        self.emailAddress = ""
        self.key = ""
        self.active = False
        self.timeZone = ""
class Execution(Serializable):
    def __init__(self):
        self.id = 0
        self.orderId = 0
        self.cycleId = -1
        self.cycleName = ""
        self.issueId = 0
        self.issueKey = 0
        self.projectKey = ""
        self.comment = ""
        self.versionId = 0,
        self.versionName = "",
        self.executedOn = ""
        self.creationDate = ""
        self.executedByUserName = ""
        self.assigneeUserName = ""
        self.status = {}
        self.executionStatus = ""

    def fromDict(self, jsonDict):
        super(Execution, self).fromDict(jsonDict)
        self.status = Status().fromDict(self.status)
        # This is already listed as Execution Status, need to associate and convert!
        return self

    def toDict(self):
        result = super(Execution, self).toDict()
        result['status'] = result['status'].toDict()
        return result
class ExecutionContainer(Serializable):
    def __init__(self):
        self.executions = []

    def fromDict(self, jsonDict):
        super(ExecutionContainer, self).fromDict(jsonDict)
        self.executions = []
        for executionDict in jsonDict["executions"]:
            self.executions.append(Execution().fromDict(executionDict))
        return self
class Test(Issue):
    def __init__(self, initDict={}):
        if initDict:
            self.__dict__.update(initDict)
        Issue.__init__(self)

    def toDict(self):
        result = super(Test, self).toDict()
        stepField = "CustomField_0001"
        if result["fields"][stepField]:
            steps = []
            for step in result["fields"][stepField]["steps"]:
                steps.append(step.toDict())
            result["fields"][stepField] = steps
        return result

    def fromDict(self, jsonDict):
        super(Test, self).fromDict(jsonDict)
        stepField = "CustomField_0001"
        steps = []
        if stepField in self.fields:
            for step in self.fields[stepField]["steps"]:
                steps.append(Step().fromDict(step))
            self.fields[stepField] = {"steps": steps}
        return self
class Set(Issue):
    def __init__(self, initDict={}):
        self.__dict__.update(initDict)
        Issue.__init__(self)
class DataCache(object):
    _hash = ""
    _data = None

    @staticmethod
    def genHash(data):
        dataDict = DataCache.dictify(data)
        datahash = json.dumps(dataDict, sort_keys=True)
        return hashlib.sha256(datahash).digest()

    @staticmethod
    def dictify(data):
        if isinstance(data, list):
            datahash = []
            for item in data:
                datahash.append(DataCache.dictify(item))
        elif isinstance(data, (dict, collections.OrderedDict)):
            datahash = collections.OrderedDict()
            # iterate over the input mapping, not the freshly created (empty) one
            for key, value in data.iteritems():
                datahash[key] = DataCache.dictify(value)
        elif isinstance(data, Serializable):
            datahash = data.toDict()
        else:
            datahash = data
        return datahash

    def __init__(self, restoreDict={}):
        if restoreDict:
            self.__dict__.update(restoreDict)

    def __getinitargs__(self):
        return (self.__dict__)

    def set(self, data):
        self._hash = DataCache.genHash(data)
        self._data = data

    def verify(self):
        dataHash = DataCache.genHash(self._data)
        return (self._hash == dataHash)

    def get(self):
        return self._data
def saveCache(name, projectKey, object):
    filePath = "migration_caches/{projectKey}".format(projectKey=projectKey)
    if not os.path.exists(path=filePath):
        os.makedirs(filePath)
    cache = DataCache()
    cache.set(object)
    targetFile = open("{path}/{name}".format(name=name, path=filePath), 'wb')
    dill.dump(obj=cache, file=targetFile)
    targetFile.close()
def loadCache(name, projectKey):
    filePath = "migration_caches/{projectKey}/{name}".format(name=name, projectKey=projectKey)
    result = False
    try:
        targetFile = open(filePath, 'rb')
        try:
            cache = dill.load(targetFile)
            if isinstance(cache, DataCache):
                if cache.verify():
                    result = cache.get()
        except EOFError:
            # except BaseException:
            print("Failed to load cache from file: {filePath}\n".format(filePath=filePath))
    except IOError:
        print("Failed to load cache file at: {filePath}\n".format(filePath=filePath))
    targetFile.close()
    return result
testIssue = Test().fromDict({"id": 1000,
                             "key": "TEST",
                             "fields": {
                                 "issuetype": {
                                     "id": 1,
                                     "name": "TestIssue"
                                 },
                                 "assignee": "Minothor",
                                 "reporter": "Minothor",
                                 "creator": "Minothor",
                             }
                             })

saveCache("Test", "TestProj", testIssue)
result = loadCache("Test", "TestProj")
EDIT 2
The script in its current form now seems to work correctly with vanilla Pickle (I initially switched to Dill due to a similar issue, which the switch solved at the time).
However, if you are here with this issue and require Dill's features, then, as Mike noted in the comments, it's possible to change the settings in dill.settings to have Dill pickle referenced items only (as in joblib mode), effectively mirroring pickle's standard pickling behaviour.
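As a rough illustration of what that looks like (the exact key names should be verified against your installed dill version, so treat this as an assumption to check):

# Assumption to verify for your dill version: dill.settings is a dict of
# global defaults, and 'byref' asks dill to pickle classes/functions by
# reference (the way pickle does) rather than serializing their definitions.
import dill

dill.settings['byref'] = True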
I'm trying to filter my flask-sqlalchemy query via strings. My procedure for creating the string results in the following array:
["user.name == 'Sid'", "user.role == 'admin'"]
It gets formed with something like this:
for i in myarray:
    filter_string = "%s.%s == '%s'" % (self.model.__tablename__, i[0], i[2])
    or_filters.append(filter_string)
Here is how I'm using it:
db = SQLAlchemy(app)
...

class myclass:
    def __init__(self, model):
        self.model = model
        self.q = db.session.query(self.model)

    def get(self, cfg):
        ...
        # the following line works
        myfilter = [User.name == 'Sid', User.role == 'admin']
        # the following line does not work, but I need it to. How to modify into the line above?
        myfilter = ["user.name == 'Sid'", "user.role == 'admin'"]
        if myfilter is not None:
            self.q = self.apply_filter(myfilter)
        items = self.q.all()
        return items

    def apply_filter(self, ftr):
        return self.q.filter(or_(*ftr))
Ilja had a good solution:
myfilter = getattr(self.model, i[0]).is_(i[2])
if myfilter is not None:
    self.q = self.apply_filter(myfilter)
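Building on that, here is a hedged sketch of turning (column_name, operator, value) triples into real SQLAlchemy expressions with getattr instead of interpolating strings; the build_filters helper and the triple layout are assumptions based on the i[0]/i[2] indexing above, and User/db come from the snippets earlier in the question.

from sqlalchemy import or_

# Sketch: build column expressions from (column_name, operator, value)
# triples rather than formatting them into raw strings.
def build_filters(model, triples):
    ops = {
        "==": lambda col, val: col == val,
        "!=": lambda col, val: col != val,
    }
    return [ops[op](getattr(model, name), val) for name, op, val in triples]

# e.g. the array from the question expressed as triples:
criteria = build_filters(User, [("name", "==", "Sid"), ("role", "==", "admin")])
query = db.session.query(User).filter(or_(*criteria))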
I am new to Web2py and am trying to use a custom validator.
class IS_NOT_EMPTY_IF_OTHER(Validator):
    def __init__(self, other,
                 error_message='must be filled because other value '
                               'is present'):
        self.other = other
        self.error_message = error_message

    def __call__(self, value):
        if isinstance(self.other, (list, tuple)):
            others = self.other
        else:
            others = [self.other]
        has_other = False
        for other in others:
            other, empty = is_empty(other)
            if not empty:
                has_other = True
                break
        value, empty = is_empty(value)
        if empty and has_other:
            return (value, T(self.error_message))
        else:
            return (value, None)
I do not understand how to use it on my table:
db.define_table('numbers',
                Field('a', 'integer'),
                Field('b', 'boolean'),
                Field('c', 'integer'))
I want to use this in a way that 'c' cannot be left blank when 'b' is ticked.
Save the code in /modules/customvalidator.py
from gluon.validators import is_empty
from gluon.validators import Validator


class IS_NOT_EMPTY_IF_OTHER(Validator):
    def __init__(self, other,
                 error_message='must be filled because other value '
                               'is present'):
        self.other = other
        self.error_message = error_message

    def __call__(self, value):
        if isinstance(self.other, (list, tuple)):
            others = self.other
        else:
            others = [self.other]
        has_other = False
        for other in others:
            other, empty = is_empty(other)
            if not empty:
                has_other = True
                break
        value, empty = is_empty(value)
        if empty and has_other:
            return (value, T(self.error_message))
        else:
            return (value, None)
then in models/db.py
from customvalidator import IS_NOT_EMPTY_IF_OTHER

db.define_table("foo",
                Field('a', 'integer'),
                Field('b', 'boolean'),
                Field('c', 'integer')
                )

# apply the validator
db.foo.c.requires = IS_NOT_EMPTY_IF_OTHER(request.vars.b)
Also, note that it can be done easily without the above validator.
Forget all the code above and try this simplified way
Version 2:
controllers/default.py
def check(form):
    if form.vars.b and not form.vars.c:
        form.errors.c = "If the b is checked, c must be filled"


def action():
    form = SQLFORM(db.foo)
    if form.process(onvalidation=check).accepted:
        response.flash = "success"
    return dict(form=form)