I have the class below to handle my Postgres DB, and I'm running into trouble with multiple inserts where foreign keys are involved. If I insert first into a parent table and then into a child table, I get a foreign key violation, although I think I have all the deferrable settings in place (autocommit is not enabled).
The constraint on the foreign key is set as follows:
CONSTRAINT tblorganisations_status_tblorganisations_fkey FOREIGN KEY (org_id)
REFERENCES organisations.tblorganisations (org_id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION DEFERRABLE INITIALLY IMMEDIATE;
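With DEFERRABLE INITIALLY IMMEDIATE the foreign key is still checked at the end of each statement unless it is explicitly deferred inside the transaction. A minimal sketch of what deferring it could look like with psycopg2 (the connection details and org_id values below are placeholders, not the real ones):

import psycopg2

conn = psycopg2.connect("host=... dbname=... user=... password=...")  # placeholder DSN
cur = conn.cursor()
# With the constraint deferred, the child row can even be inserted before the parent.
cur.execute("SET CONSTRAINTS organisations.tblorganisations_status_tblorganisations_fkey DEFERRED;")
cur.execute("INSERT INTO organisations.tblorganisations_status (org_id, org_status_id) VALUES (%s, %s)",
            ('ORG_X', 'NEW_CGM'))
cur.execute("INSERT INTO organisations.tblorganisations (org_id) VALUES (%s)", ('ORG_X',))
conn.commit()  # the deferred FK is checked here, after both rows exist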
The code that calls the class:
postgres = Postgresql("organisations")
r = postgres.insert(self.db_table, data, return_cols='org_id')
self.org_id = r['org_id']
postgres.insert('tblorganisations_status',
{'org_id': self.org_id,
'org_status_id': 'NEW_CGM'})
postgres.commit()
And the class:
class Postgresql():
conn = None
cur = None
last_result = None
def __init__(self, schema=None):
reload(sys) # Reload does the trick!
sys.setdefaultencoding("utf-8")
self.log = Log()
self.connect()
if schema is not None:
self.schema = schema
self.set_default_schema(schema)
def connection_string(self):
return 'host=%s port=%s dbname=%s user=%s password=%s' % \
(get_config('DATABASE', 'host'),
get_config('DATABASE', 'port'),
get_config('DATABASE', 'dbname'),
get_config('DATABASE', 'user'),
get_config('DATABASE', 'password'))
def connect(self):
try:
self.conn = psycopg2.connect(self.connection_string())
self.conn.set_session(isolation_level='read uncommitted', deferrable=True)
self.cur = self.conn.cursor(cursor_factory=RealDictCursor)
except Exception, e:
self.log.error(e.message)
raise
def set_default_schema(self, schema):
try:
self.cur.execute("SET search_path TO %s,public;", (schema, ))
except Exception, e:
self.log.error(e.message)
raise
def commit(self):
self.conn.commit()
self.close()
def rollback(self):
self.conn.rollback()
self.close()
def close(self):
self.cur.close()
self.conn.close()
def insert(self, table, data, return_cols=None, **kwargs):
data = self.cleanup_data(table, data)
fields = data.keys()
if self.schema is not None:
table = self.schema + '.' + table
sql = "INSERT INTO " + table + " ("
sql += ",".join(fields) + ") VALUES (" + ",".join(["%s"]*len(fields)) + ")"
if return_cols:
sql += " RETURNING " + return_cols
sql += ";"
if 'debug' in kwargs:
raise Exception(sql % tuple(data.values()))
try:
self.log.event('POSTGRES: ' + (sql % tuple(data.values())))
self.cur.execute(sql, data.values())
if return_cols:
result = self.cur.fetchone()
return result
except Exception, e:
self.log.error(e.message)
self.conn.rollback()
self.close()
raise
I figured it out myself. The problem was that I had declared the connection and cursor as class-level variables outside __init__, so they were shared by every instance of the class instead of belonging to each object.
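For reference, a minimal sketch of the same class with those attributes moved into __init__ so that each instance gets its own connection and cursor (the remaining methods stay as above):

class Postgresql():
    def __init__(self, schema=None):
        # Instance attributes instead of class attributes, so every
        # Postgresql() object has its own connection, cursor and result.
        self.conn = None
        self.cur = None
        self.last_result = None
        self.log = Log()
        self.connect()
        self.schema = schema  # always set, so insert() can safely test it
        if schema is not None:
            self.set_default_schema(schema)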
Related
I am trying to create a database using Python. When the program runs, no error occurs; however, nothing seems to happen. Is there a line I am missing?
class create_db:
def __init__(self):
self.conn = sqlite3.connect("EXAMPLE.db")
self.c = self.conn.cursor()
def create_tables(self, Tables):
for table_name, field in Tables.items():
self.c.execute('CREATE TABLE IF NOT EXISTS ' + table_name + '(' + field + ')')
self.conn.commit()
def main():
db = create_db()
tables = {"CUSTOMERS": '''CustomerID integer,
Customer_Name text,
primary key (CustomerID)'''}
db.create_tables(tables)
main()
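Creating a table produces no output by itself, so "nothing happens" may just mean there is nothing printed. One way to check that the table really was created is to query sqlite_master (a minimal sketch, reusing the EXAMPLE.db file name from above):

import sqlite3

conn = sqlite3.connect("EXAMPLE.db")
# List the tables that exist in the database file.
tables = conn.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
print(tables)  # e.g. [('CUSTOMERS',)] if create_tables() ran successfully
conn.close()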
I used the following code to get items from a sqlite3 database:
def get(self, item_name, attrs=True): #get attr from item and return as dict, if attr==True: get all items
conn = self.conn
if attrs: #all
return conn.execute('SELECT * FROM %s WHERE __item_key__ = "%s";' %(self.table, item_name))
else:
command = 'SELECT '
for attr in attrs:
command+= attr+' '
command+='FROM %s WHERE __item_key__ = "%s";' %(self.table, item_name)
return conn.execute(command)
print(get('name1'))
The code prints the following:
<sqlite3.Cursor at 0x213d4c0f490>
instead of the values from the table.
When I try this:
get('name1')[0]
it returns:
TypeError: 'sqlite3.Cursor' object is not subscriptable
Full code:
import sqlite3 as sql
class db:
'''
This class turns dicts into sqlite databases
and output sqlite databases as dicts
'''
def __init__(self, db_name, table_name): #open or create a database
conn = sql.connect(db_name).cursor()
self.table = table_name
self.conn = conn
def create(self, table_name, cols):
command = "CREATE TABLE %s(_item_key_ TEXT," % table_name
for key, value in cols.items():
command+="%s %s," %(key, value)
command=command[:-1]
command+=");"
self.conn.execute(command)
self.table = table_name
def get(self, item_name, attrs=True): #get attr from item and return as dict, if attr==True: get all items
conn = self.conn
if attrs: #all
return conn.execute('SELECT * FROM %s WHERE _item_key_ = "%s";' %(self.table, item_name))
else:
command = 'SELECT '
for attr in attrs:
if type(attr) == str:
attr = '"'+attr+'"'
command+= str(attr)+' '
command+='FROM %s WHERE _item_key_ = "%s";' %(self.table, item_name)
return conn.execute(command).fetchall()
def change(self, item_name, attrs): #change certain attrs of item
command = 'UPDATE %s SET ' %self.table
for key, value in attrs:
command += '%s=%s,'%(key, value)
command = command[:-1]+' WHERE _item_name_ = "'+item_name+'";'
def add(self, item_name, attrs): #add an item with attrs to database
command = 'INSERT INTO %s VALUES ("%s",' %(self.table, item_name)
for attr in attrs:
if type(attr) == str:
attr = '"'+attr+'"'
command += str(attr)+','
command = command[:-1]+');'
#print(command)
self.conn.execute(command)
def close(self): #close database
self.conn.close()
The table is supposed to look like the following (although I never saw it):
__item_name__ A B
---------------------------
'name1' 123 'hi'
'name2' 344 'bye'
Does anyone know how this works?
Edit: I found some bugs in create() and add(). However, after fixing them, get() still prints the same thing.
It also returns that no cursor object was found.
If you want to get the results you need to add these lines:
cur = conn.cursor() # create a cursor to your connection
cur.execute(your_query) # execute your query
results = cur.fetchall() # fetch the results
Also, don't forget to iterate over the results after results = cur.fetchall():
for row in results:
A = row[0]
B = row[1]
You should also revise the code to call self.conn.commit() after each self.conn.execute(...):
self.conn.execute(command)
self.conn.commit()  # <-- new line, added after .execute()
self.table = table_name
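Putting those points together, a possible corrected version of the constructor and get() could look like this (a sketch, not the original code: it keeps the connection and the cursor separate, uses a parameterized query for the item value, and returns the fetched rows rather than the cursor):

import sqlite3 as sql

class db:
    def __init__(self, db_name, table_name):
        self.conn = sql.connect(db_name)  # keep the connection itself...
        self.cur = self.conn.cursor()     # ...and a cursor created from it
        self.table = table_name

    def get(self, item_name, attrs=True):
        # attrs=True means "all columns"; otherwise attrs is a list of column names.
        cols = '*' if attrs is True else ', '.join(attrs)
        # Table and column names cannot be parameterized, but values can.
        query = 'SELECT %s FROM %s WHERE _item_key_ = ?' % (cols, self.table)
        self.cur.execute(query, (item_name,))
        return self.cur.fetchall()        # actual rows, not a cursor object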
Using MySQL and Python, I have created a table within PyQt that allows a user to update and track their meeting sessions. The only problem is that I do not know how to go about coding a button that lets me delete an individual row of data.
My table looks like so:
What would be the simplest way to create a button that appears beside a row of data when new data is inserted, and that lets the user delete that entire row?
Edit:
def deleteRows(self):
items = self.treeWidget.selectedItems()
current = self.treeWidget.currentItem()
for item in items:
sip.delete(current)
self.UpdateTree()
Edit 2:
import mysql.connector
from mysql.connector import errorcode
from datetime import datetime
class DatabaseUtility:
def __init__(self, database, tableName):
self.db = database
self.tableName = tableName
f = open('C:\\Users\\Vlad\\Desktop\\Myfiles\\EnterprisePassport\\password.txt', 'r')
p = f.read(); f.close();
self.cnx = mysql.connector.connect(user = 'root',
password = p,
host = '127.0.0.1')
self.cursor = self.cnx.cursor()
self.ConnectToDatabase()
self.CreateTable()
def ConnectToDatabase(self):
try:
self.cnx.database = self.db
except mysql.connector.Error as err:
if err.errno == errorcode.ER_BAD_DB_ERROR:
self.CreateDatabase()
self.cnx.database = self.db
else:
print(err.msg)
def CreateDatabase(self):
try:
self.RunCommand("CREATE DATABASE %s DEFAULT CHARACTER SET 'utf8';" %self.db)
except mysql.connector.Error as err:
print("Failed creating database: {}".format(err))
def CreateTable(self):
cmd = (" CREATE TABLE IF NOT EXISTS " + self.tableName + " ("
" `ID` int(5) NOT NULL AUTO_INCREMENT,"
" `date` date NOT NULL,"
" `time` time NOT NULL,"
" `message` char(50) NOT NULL,"
" PRIMARY KEY (`ID`)"
") ENGINE=InnoDB;")
self.RunCommand(cmd)
def GetTable(self):
self.CreateTable()
return self.RunCommand("SELECT * FROM %s;" % self.tableName)
def GetColumns(self):
return self.RunCommand("SHOW COLUMNS FROM %s;" % self.tableName)
def RunCommand(self, cmd):
print ("RUNNING COMMAND: " + cmd)
try:
self.cursor.execute(cmd)
except mysql.connector.Error as err:
print ('ERROR MESSAGE: ' + str(err.msg))
print ('WITH ' + cmd)
try:
msg = self.cursor.fetchall()
except:
msg = self.cursor.fetchone()
return msg
def AddEntryToTable(self, message):
date1 = datetime.now().strftime("%y-%m-%d")
time = datetime.now().strftime("%H:%M")
cmd = " INSERT INTO " + self.tableName + " (date, time, message)"
cmd += " VALUES ('%s', '%s', '%s' );" % (date1, time, message)
self.RunCommand(cmd)
def __del__(self):
self.cnx.commit()
self.cursor.close()
self.cnx.close()
if __name__ == '__main__':
db = 'enterprisepassport'
tableName = 'session'
dbu = DatabaseUtility(db, tableName)
Just add a button at the top that deletes the selected rows. It looks like you're using a QTreeWidget:
def __init__(...)
...
self.deleteButton.clicked.connect(self.deleteRows)
def deleteRows(self):
items = self.tree.selectedItems()
for item in items:
# Code to delete items in database
self.refreshTable()
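For the "Code to delete items in database" part, one possibility is a parameterized DELETE through the MySQL connection (a sketch: it assumes the tree widget shows the row's ID in column 0 and that the DatabaseUtility instance from above is available as self.dbu; both names are illustrative):

def deleteRows(self):
    items = self.tree.selectedItems()
    for item in items:
        row_id = item.text(0)  # assumes column 0 holds the ID from the session table
        # Parameterized DELETE so the value is escaped by the driver.
        self.dbu.cursor.execute("DELETE FROM session WHERE ID = %s", (row_id,))
    self.dbu.cnx.commit()
    self.refreshTable()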
#Randomator, why not explain it as if you're explaining to a beginner? Even I do not understand what you're suggesting.
The poster wrote his code; make the corrections in his code so that he can easily run the file.
I have a database class in python which I use to query the database.
class Database():
def __init__(self, user, password, host, port, service_name, mode, *args):
#mode should be 0 if not cx_Oracle.SYSDBA
self.user = user
self.password = password
self.host = host
self.port = port
self.user = user
self.service_name = service_name
self.logger = logging.getLogger(__name__)
self.mode = 0
self.connection = None
dsn = cx_Oracle.makedsn(self.host, self.port, self.service_name)
self.connect_string = self.user + '/' + self.password + '@' + dsn
try:
self.connection = cx_Oracle.connect(self.connect_string, mode=self.mode, threaded=True)
self.connection.stmtcachesize = 1000
self.connection.client_identifier = 'my_app_scheduler'
self.cursor = self.connection.cursor()
self.cursor.arraysize = 10000
self.idVar = self.cursor.var(cx_Oracle.NUMBER)
except cx_Oracle.DatabaseError, exc:
error, = exc
self.logger.exception('Exception occured while trying to create database object : %s', error.message)
raise exc
def query(self, q):
try:
self.cursor.execute(q)
return self.cursor.fetchall(), self.cursor.rowcount
except cx_Oracle.DatabaseError, exc:
raise exc
And this is the code to manipulate the fetched data and convert it.
output, rowcount = db_run_query.query(sql_text)
#self.logger.debug('output : %s, type : %s', output, type(output))
end_time=time.time()
time_taken=end_time - start_time
self.logger.debug('Rowcount : %s, time_taken : %s', rowcount, time_taken)
column_name = [d[0] for d in db_run_query.cursor.description]
result = [dict(zip(column_name, row)) for row in output]
#Convert everything to string : Eg: datetime
try:
for each_dict in result:
for key in each_dict:
if isinstance(each_dict[key], cx_Oracle.LOB):
self.logger.debug('%s', each_dict[key].size())
each_dict[key]=each_dict[key].read()
#self.logger.debug('%s %s %s %s %s %s %s', key, each_dict, type(key), type(each_dict[key]), type(each_dict), temp_each_dict, type(temp_each_dict))
else:
each_dict[key]=str(each_dict[key])
except Exception as e:
self.logger.debug(e)
So without self.cursor.arraysize = 10000
and for a query like select clob_value from table it fetched the data and logged Rowcount : 4901, time_taken : 0.196296930313, but it gave me an error like
LOB variable no longer valid after subsequent fetch
but when I set the arraysize parameter the error goes away. (Is arraysize only relevant for LOB columns? It works fine for select other_column from table where rownum < 20000, where other_column is a varchar.)
Why does that happen?
It turns out CLOBs and fetchall() don't play nicely together:
Internally, Oracle uses LOB locators which are allocated based on the
cursor array size. Thus, it is important that the data in the LOB
object be manipulated before another internal fetch takes place. The
safest way to do this is to use the cursor as an iterator. In
particular, do not use the fetchall() method.
By avoiding cursor.fetchall() and using the cursor as an iterator (e.g. for row in cursor: ...), I was able to get around this problem.
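In code, that could look roughly like the sketch below (based on the query class above; the column handling and the function name are illustrative):

import cx_Oracle

def query_rows(cursor, sql_text):
    """Iterate over the cursor instead of calling fetchall(), reading each
    LOB before the next internal fetch invalidates its locator."""
    cursor.execute(sql_text)
    column_names = [d[0] for d in cursor.description]
    rows = []
    for row in cursor:                       # cursor used as an iterator, no fetchall()
        values = []
        for value in row:
            if isinstance(value, cx_Oracle.LOB):
                values.append(value.read())  # materialize the LOB right away
            else:
                values.append(value)
        rows.append(dict(zip(column_names, values)))
    return rows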
For a specific use case: I have 100 databases, and one of them is the central database. My app connects to that one central database, which then opens connections to any of the 100 databases, as requested by the user, to run queries on them.
In this case, does using DRCP make sense? I don't want a connection to be killed while the user is running a query, but at the same time I don't want too many connections opened to the database. I currently control this by creating a profile on the database that limits the number of active sessions to a low number, say 5, for that specific user (read_only_user) using that specific profile (read_only_profile).
Right now I am using the standard open-a-connection-per-request model, but I'm not sure that's the best way to go about it.
import cx_Oracle
import logging, time
class Database(object):
'''
Use this method to for DML SQLS :
Inputs - Sql to be executed. Data related to that sql
Returns - The last inserted, updated, deleted ID.
'''
def __init__(self, user, password, host, port, service_name, mode, *args):
#mode should be 0 if not cx_Oracle.SYSDBA
self.user = user
self.password = password
self.host = host
self.port = port
self.user = user
self.service_name = service_name
self.logger = logging.getLogger(__name__)
try:
self.mode = mode
except:
self.mode = 0
self.logger.info(" Mode is not mentioned while creating database object")
self.connection = None
dsn = cx_Oracle.makedsn(self.host, self.port, self.service_name)
self.connect_string = self.user + '/' + self.password + '@' + dsn
try:
self.connection = cx_Oracle.connect(self.connect_string, mode=self.mode,
threaded=True)
self.connection.stmtcachesize = 1000
self.connection.client_identifier = 'my_app'
self.cursor = self.connection.cursor()
self.idVar = self.cursor.var(cx_Oracle.NUMBER)
except cx_Oracle.DatabaseError, exc:
error, = exc
self.logger.exception(
'Exception occured while trying to create database object : %s',
error.message)
raise exc
def query(self, q):
try:
self.cursor.execute(q)
return self.cursor.fetchall()
except cx_Oracle.DatabaseError, exc:
error, = exc
self.logger.info(
"Error occured while trying to run query: %s, error : %s", q,
error.message)
return error.message
def dml_query(self, sql):
try:
self.cursor.execute(sql)
self.connection.commit()
return 1
except Exception as e:
self.logger.exception(e)
return 0
def dml_query_with_data(self, sql, data):
"""
Use this method to for DML SQLS :
Inputs - Sql to be executed. Data related to that sql
Returns - The last inserted, updated, deleted ID.
"""
try:
self.cursor.execute(sql, data)
self.connection.commit()
return 1
except Exception as e:
self.logger.exception(e)
return 0
def update_output(self, clob, job_id, flag):
try:
q = "Select output from my_table where job_id=%d" % job_id
self.cursor.execute(q)
output = self.cursor.fetchall()
#Checking if we already have some output in the clob for that job_id
if output[0][0] is None:
if flag == 1:
self.cursor.execute("""UPDATE my_table
SET OUTPUT = :p_clob
,job_status=:status WHERE job_id = :p_key""",
p_clob=clob, status="COMPLETED", p_key=job_id)
else:
self.cursor.execute("""UPDATE my_table
SET OUTPUT = :p_clob
,job_status=:status WHERE job_id = :p_key""",
p_clob=clob, status="FAILED", p_key=job_id)
else:
self.cursor.execute("""UPDATE my_table
SET OUTPUT = OUTPUT || ',' || :p_clob
WHERE job_id = :p_key""", p_clob=clob, p_key=job_id)
self.connection.commit()
rows_updated = self.cursor.rowcount
return rows_updated
except Exception as e:
self.logger.exception(e)
return 0
def __del__(self):
try:
if self.connection is not None:
self.connection.close()
except Exception as e:
self.logger.exception(
"Exception while trying to close database connection object : %s", e)
'''
if __name__ == '__main__':
db = Database('test', 'test', 'my_host', '1000', 'my_db', 0)
columns = db.query('select * from my-table')
print columns
'''
This is my database class; I create an object whenever I need a connection to the DB, and the __init__ and __del__ methods take care of constructing and destroying the object.
Should I be using DRCP / a session pool to improve performance?
What if there are too many users waiting because all the connections in the DRCP pool are taken?
Can I have a session pool per database (for the 100 databases, each database can take at most 5 connections at a time for that read_only_user)?
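As a point of comparison, a per-database pool built with cx_Oracle's SessionPool might look roughly like this (a sketch with hypothetical names; the min/max/increment values are only examples chosen to mirror the 5-session profile limit, not a recommendation):

import cx_Oracle

# One small pool per target database, keyed by a name the app understands.
pools = {}

def get_pool(name, user, password, host, port, service_name):
    if name not in pools:
        dsn = cx_Oracle.makedsn(host, port, service_name)
        pools[name] = cx_Oracle.SessionPool(user=user, password=password, dsn=dsn,
                                            min=1, max=5, increment=1, threaded=True)
    return pools[name]

def run_query(pool, sql):
    conn = pool.acquire()          # borrow a session from the pool
    try:
        cur = conn.cursor()
        cur.execute(sql)
        return cur.fetchall()
    finally:
        pool.release(conn)         # return the session instead of closing it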