When I tested Django's update_or_create under multiple threads, the result was not what I expected: more than one record was created in MySQL. As the code below shows, update_or_create uses SELECT ... FOR UPDATE to lock rows in MySQL, so there should only ever be one record. I verified the expected behaviour with SQLAlchemy and raw SQL.
So, is the Django code wrong?
The Django code:
import threading
# OxalicAcid is my Django model (model/app imports omitted here)

def get_or_create_ins():
    p, created = OxalicAcid.objects.update_or_create(defaults={"formula": "20", "degree": "80"}, name="smart")

def run():
    for i in range(10):
        t = threading.Thread(target=get_or_create_ins, args=())
        t.start()

if __name__ == "__main__":
    # more than one record will be created
    run()
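For reference, this is roughly what I expected update_or_create to be doing under the hood: an explicit transaction.atomic block around a select_for_update lookup. This is only my own approximation (the helper name is mine), not Django's actual source:

from django.db import transaction

def explicit_update_or_create():
    # approximation: lock the matching row (if any), then update or insert
    with transaction.atomic():
        obj = OxalicAcid.objects.select_for_update().filter(name="smart").first()
        if obj is None:
            obj = OxalicAcid.objects.create(name="smart", formula="20", degree="80")
        else:
            obj.formula = "20"
            obj.degree = "80"
            obj.save()
        return obj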
The SQLAlchemy code:

import threading
from contextlib import contextmanager

from sqlalchemy import text
# Session is a sessionmaker bound to the MySQL engine (engine setup omitted)

@contextmanager
def transaction_atomic():
    session = Session()
    try:
        yield session
        session.commit()
    except Exception as e:
        session.rollback()
        raise e

def get_result_with_update(session, name):
    sql = text("""
        select * from acid_oxalicacid where name = :name for update
    """)
    params = dict(name=name)
    cursor = session.execute(sql, params)
    result = cursor.fetchall()
    return result

def get_result(session, name):
    sql = text("""
        select * from acid_oxalicacid where name = :name
    """)
    params = dict(name=name)
    cursor = session.execute(sql, params)
    result = cursor.fetchall()
    return result

def create_data(session, name, degree, formula):
    sql = text("""
        insert into acid_oxalicacid (name, degree, formula) values (:name, :degree, :formula)
    """)
    params = dict(
        name=name,
        degree=degree,
        formula=formula
    )
    session.execute(sql, params)

def get_or_create():
    name = "smart"
    degree = "50"
    formula = "100"
    with transaction_atomic() as session:
        res = get_result_with_update(session, name)
        if not res:
            create_data(session, name, degree, formula)
        res = get_result(session, name)
        return res

if __name__ == "__main__":
    # Only one record is created, which is correct
    for i in range(10):
        t = threading.Thread(target=get_or_create, args=())
        t.start()
It turns out the Django connections were running at the READ COMMITTED transaction isolation level, which is why multiple records could be created; after changing it to REPEATABLE READ, only one record ends up in the database.
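For anyone hitting the same thing, here is a minimal sketch of how the isolation level can be changed, assuming Django 2.0+ with the MySQL backend (the "isolation_level" key in OPTIONS); credentials and the database name are omitted:

# settings.py (sketch)
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.mysql",
        # NAME/USER/PASSWORD/HOST left out here
        "OPTIONS": {
            # run connections at REPEATABLE READ instead of READ COMMITTED
            "isolation_level": "repeatable read",
        },
    }
}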
I have a PostgreSQL database named products and I want to get all the rows as a dictionary or JSON. I've seen an example of this, but how do I pass the username, password, and host?
This is the example:
import json
import psycopg2

def db(database_name='products'):
    return psycopg2.connect(database=database_name)

def query_db(query, args=(), one=False):
    cur = db().cursor()
    cur.execute(query, args)
    r = [dict((cur.description[i][0], value) for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r

my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)
json_output = json.dumps(my_query)
print(json_output)
When I use it like this I'm getting this error:
File "/home/alex/Documents/Proiecte/Python/bapp/venv/lib/python3.5/site-packages/psycopg2/__init__.py", line 130, in connect
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
psycopg2.OperationalError: fe_sendauth: no password supplied
When I do it like this:
import json
import psycopg2

def db(database_name='products', password='...', host='123.123.123.13', user='alex'):
    return psycopg2.connect(database=database_name, password=password, host=host, user=user)

def query_db(query, args=(), one=False):
    cur = db().cursor()
    cur.execute(query, args)
    r = [dict((cur.description[i][0], value) for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r

my_query = query_db("SELECT * FROM main_prod WHERE id = 1")
print(my_query)
json_output = json.dumps(my_query)
print(json_output)
it won't print anything; it just hangs as if it were sleeping.
How can I do this?
Try this:

import psycopg2
import json

def main():
    conn_string = "dbname='products' user='alex' host='123.123.123.13' password='...'"
    # print the connection string we will use to connect
    print("Connecting to database\n ->%s" % conn_string)
    # get a connection; if a connection cannot be made an exception will be raised here
    conn = psycopg2.connect(conn_string)
    # conn.cursor will return a cursor object; you can use this cursor to perform queries
    cursor = conn.cursor()
    # execute our query
    cursor.execute("SELECT * FROM main_prod WHERE id = 1")
    # retrieve the records from the database
    records = cursor.fetchall()
    # rows are plain tuples, so pick fields by position (assuming id is the first column)
    objects = [
        {
            'id': row[0],
        } for row in records
    ]  # here you decide which data you want to return
    json_output = json.dumps(objects)
    print(json_output)

if __name__ == "__main__":
    main()
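As an alternative sketch (assuming psycopg2's bundled extras module is available), you can let the driver build the dictionaries for you with RealDictCursor instead of zipping cursor.description yourself:

import json
import psycopg2
from psycopg2.extras import RealDictCursor

def query_db_dicts(query, args=()):
    # connection parameters are placeholders; use your own credentials
    conn = psycopg2.connect(dbname='products', user='alex',
                            host='123.123.123.13', password='...')
    try:
        cur = conn.cursor(cursor_factory=RealDictCursor)
        cur.execute(query, args)
        return cur.fetchall()  # a list of dict-like rows
    finally:
        conn.close()

print(json.dumps(query_db_dicts("SELECT * FROM main_prod WHERE id = %s", (1,))))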
I've been learning Python for the last few weeks. For better learning, I decided to work on a project, so here is my class for a MySQL connection along with a demo example. Can you please tell me what other improvements could be made to the following code?
Structure?
What else can I do to optimize the code?
And please forgive me if I'm making some silly mistakes in the code (I'm learning).
#!/usr/bin/python
import pymysql

# select (table, parameter)
# insert (table, data)
# update (table, id, data)
# delete (table, id)

class MySQL:
    def __init__(self):
        self.sort_by = ""
        self.order = ""
        # initiate database connection.
        self.connection = pymysql.connect(host='localhost',
                                          user='root',
                                          password='',
                                          db='sherlock',
                                          charset='utf8mb4')
        self.cursor = self.connection.cursor(pymysql.cursors.DictCursor)

    # this function is for selecting any feild on any table.(feilds veriable is optinal)
    def select(self, table, *feilds):
        flds = ""  # differnt name for feilds veriable.
        if not feilds:
            flds = '*'
        else:
            for f in feilds:
                if not flds:
                    flds = f
                else:
                    flds += ",`%s`" % f
        sql = "SELECT %s FROM `%s` " % (flds, table)
        if self.sort_by:
            sql = sql + "order by " + str(self.sort_by) + " " + str(self.order)
        print sql
        self.cursor.execute(sql)
        result = self.cursor.fetchall()
        return result

    # This function is for data sorting for Mysql; but optinal.
    # example : SELECT * FROM `users` order by id asc
    def order_by(self, sort_by="", order="", *args, **kwargs):
        self.sort_by = sort_by
        self.order = order

    # this function is for closing Mysql connection
    def close(self):
        self.connection.close()

########### END OF MySQL CLASS #############

sql = MySQL()
# sql.order_by function should be called before the sql.select() function.
sql.order_by("email")
# this will select all the feilds from `users` table.
# you can specify whichever feilds you want to return. like : sql.select("users", "id, email")
result = sql.select("users", "password")
for email in result:
    print email["password"]
sql.close()
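One improvement that is usually suggested is to let pymysql handle value quoting with parameterized queries instead of building SQL strings by hand. A sketch, independent of the class above (the table and column names are taken from the example, the email value is a placeholder):

import pymysql

connection = pymysql.connect(host='localhost', user='root', password='',
                             db='sherlock', charset='utf8mb4',
                             cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        # values are passed as parameters (%s), so the driver escapes them
        cursor.execute("SELECT `id`, `email` FROM `users` WHERE `email` = %s ORDER BY `id`",
                       ("someone@example.com",))
        for row in cursor.fetchall():
            print(row["email"])
finally:
    connection.close()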
I'm working on an IRC bot, forked from a modular bot called Skybot.
There are two other modules that make use of the sqlite3 database by default; they have both been removed and their tables dropped, so I know that the issue is somewhere in what I'm doing.
I only call 3 db.execute() statements in the whole thing and they're all immediately committed. This thing isn't getting hammered with queries either, but the lock remains.
Relevant code:
def db_init(db):
    db.execute("create table if not exists searches"
               "(search_string UNIQUE PRIMARY KEY,link)")
    db.commit()
    return db

def get_link(db, inp):
    row = db.execute("select link from searches where"
                     " search_string=lower(?) limit 1",
                     (inp.lower(),)).fetchone()
    db.commit()
    return row

def store_link(db, stub, search):
    db.execute("insert into searches (search_string, link) VALUES (?, ?)", (search.lower(), stub))
    db.commit()
    return stub
If the script only has to touch db_init() and get_link() it breezes through, but if it needs to call store_link() while the database is unlocked, it performs the insert but doesn't seem to commit it in a way that future calls to get_link() can read until the bot restarts.
The bot's db.py:
import os
import sqlite3

def get_db_connection(conn, name=''):
    "returns an sqlite3 connection to a persistent database"
    if not name:
        name = '%s.%s.db' % (conn.nick, conn.server)
    filename = os.path.join(bot.persist_dir, name)
    return sqlite3.connect(filename, isolation_level=None)

bot.get_db_connection = get_db_connection
I did adjust the isolation_level myself; originally the call used timeout=10. I am fairly stumped.
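As a sanity check, this standalone sketch (not the bot's code) is the kind of thing I'd expect to work: with isolation_level=None the insert is autocommitted, so a second connection to the same file should see it without any explicit commit():

import sqlite3

writer = sqlite3.connect("visibility_test.db", isolation_level=None)  # autocommit
reader = sqlite3.connect("visibility_test.db")

writer.execute("create table if not exists searches"
               "(search_string UNIQUE PRIMARY KEY,link)")
writer.execute("insert or replace into searches (search_string, link) values (?, ?)",
               ("my search", "http://example.com/stub"))

# the reader connection should see the committed row immediately
print(reader.execute("select link from searches where search_string=? limit 1",
                     ("my search",)).fetchone())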
EDIT: The usages of get_db_connection():
main.py (main loop):
def run(func, input):
    args = func._args
    if 'inp' not in input:
        input.inp = input.paraml
    if args:
        if 'db' in args and 'db' not in input:
            input.db = get_db_connection(input.conn)
        if 'input' in args:
            input.input = input
        if 0 in args:
            out = func(input.inp, **input)
        else:
            kw = dict((key, input[key]) for key in args if key in input)
            out = func(input.inp, **kw)
    else:
        out = func(input.inp)
    if out is not None:
        input.reply(unicode(out))

...

def start(self):
    uses_db = 'db' in self.func._args
    db_conns = {}
    while True:
        input = self.input_queue.get()
        if input == StopIteration:
            break
        if uses_db:
            db = db_conns.get(input.conn)
            if db is None:
                db = bot.get_db_connection(input.conn)
                db_conns[input.conn] = db
            input.db = db
        try:
            run(self.func, input)
        except:
            traceback.print_exc()
Pass conn into your functions along with db, as mentioned. If you wrote the code yourself, you'll know where the database actually is. Conventionally you would do something like:
conn = sqlite3.connect('database.db')
db = conn.cursor()
Then for general usage:
db.execute("...")
conn.commit()
Hence, in your case:
def db_init(conn, db):
    db.execute("create table if not exists searches"
               "(search_string UNIQUE PRIMARY KEY,link)")
    conn.commit()
    return db

def get_link(conn, db, inp):
    row = db.execute("select link from searches where"
                     " search_string=lower(?) limit 1",
                     (inp.lower(),)).fetchone()
    conn.commit()
    return row

def store_link(conn, db, stub, search):
    db.execute("insert into searches (search_string, link) VALUES (?, ?)", (search.lower(), stub))
    conn.commit()
    return stub
Given that you have set the connection to autocommit:
sqlite3.connect(filename, isolation_level=None)
there is no need for the commit statements in your code at all.
Edit:
Wrap your execute statements in try blocks, so that you at least have a chance of finding out what is going on, e.g.:
import sqlite3

def get_db(name=""):
    if not name:
        name = "db1.db"
    return sqlite3.connect(name, isolation_level=None)

connection = get_db()
cur = connection.cursor()
try:
    cur.execute("create table if not exists searches"
                "(search_string UNIQUE PRIMARY KEY,link)")
except sqlite3.Error as e:
    print 'Searches create Error ' + str(e)
try:
    cur.execute("insert into searches (search_string, link) VALUES (?, ?)", ("my search", "other"))
except sqlite3.Error as e:
    print 'Searches insert Error ' + str(e)
cur.execute("select link from searches where search_string=? limit 1", ["my search"])
s_data = cur.fetchone()
print 'Result:', s_data
I have a small problem with this class which handles my DB. It keeps saying:
cursor.execute(sql)
ValueError: operation parameter must be str
I tried lots of things but nothing works the way I want. I looked over https://docs.python.org/3.4/library/sqlite3.html and I'm sure I'm doing the same things.
import sqlite3

class Database():
    def __init__(self):
        try:
            self.db = sqlite3.connect('../database.sqlite')
            self.cur = self.db.cursor()
            self.cur.execute('pragma foreign_keys="1"')
        except sqlite3.Error as e:
            raise e

    def select(self, sql):
        cursor = self.db.cursor()
        cursor.execute(sql)
        records = cursor.fetchall()
        cursor.close()
        return records

    def insert(self, sql):
        cursor = self.db.cursor()
        cursor.execute(sql)
        newID = cursor.lastrowid
        self.db.commit()
        cursor.close()
        return newID

    def execute(self, sql):
        """ execute any SQL statement but no return value given """
        cursor = self.db.cursor()
        cursor.execute(sql)
        self.db.commit()
        cursor.close()

if __name__ == '__main__':
    db = Database()
    #sql = "SELECT skuref, titre_prod FROM product"
    t = ("888888",)
    sql = "UPDATE product SET created = 1 WHERE skuref = ?", t
    db.execute(sql)
If someone could help me I would be grateful. Later I want to pass something like this in the main program inside a for loop:
lastpost = record[0]
if created = True
sql = "UPDATE product SET created = 1 WHERE skuref = ?",(lastpost,)
db.execute(sql)
sql is a tuple containing the SQL statement and the parameters.
Change it as follows, so that the statement and the parameters are passed to cursor.execute() separately instead of as a single tuple:

def execute(self, sql):
    """ execute any SQL statement but no return value given """
    cursor = self.db.cursor()
    cursor.execute(*sql)  # <------ unpacks (statement, params)
    self.db.commit()
    cursor.close()
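To make the unpacking concrete, with the tuple built in the question's __main__ block:

sql = ("UPDATE product SET created = 1 WHERE skuref = ?", ("888888",))
# cursor.execute(*sql) is then the same call as:
# cursor.execute("UPDATE product SET created = 1 WHERE skuref = ?", ("888888",))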
With your statement
sql = "UPDATE product SET created = 1 WHERE skuref = ?",(lastpost,)
you have created a tuple like
("UPDATE product SET created = 1 WHERE skuref = ?", (lastpost,))
You have to pass the arguments as separate parameters to the execute() function. Also, your if statement is broken: the colon is missing, it uses = instead of ==, and the explicit comparison with True isn't necessary.
Try this:
lastpost = record[0]
if created:
    sql = "UPDATE product SET created = 1 WHERE skuref = ?"
    db.execute(sql, (lastpost,))
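Note that this assumes the Database.execute() wrapper is also extended so it can pass parameters through to sqlite3, for example:

def execute(self, sql, params=()):
    """ execute any SQL statement, optionally with parameters; no return value given """
    cursor = self.db.cursor()
    cursor.execute(sql, params)
    self.db.commit()
    cursor.close()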
For a specific use case: I have 100 databases and 1 central database. My app connects to the central database, which then spawns connections to any of the 100 databases, depending on which one the user wants to run a query against.
In this case, does using DRCP make sense? I don't want a connection to be killed while the user is running a query, but I also don't want too many connections open to each database. I currently control that by creating a profile on the database which limits the number of active sessions to some low number, say 5, for that specific user (read_only_user) using that specific profile (read_only_profile).
Right now I am using the standard open-a-connection-per-request model, but I'm not sure that's the best way to go about it.
import cx_Oracle
import logging, time


class Database(object):
    '''
    Use this method to for DML SQLS :
    Inputs - Sql to be executed. Data related to that sql
    Returns - The last inserted, updated, deleted ID.
    '''
    def __init__(self, user, password, host, port, service_name, mode, *args):
        # mode should be 0 if not cx_Oracle.SYSDBA
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.user = user
        self.service_name = service_name
        self.logger = logging.getLogger(__name__)
        try:
            self.mode = mode
        except:
            self.mode = 0
            self.logger.info(" Mode is not mentioned while creating database object")
        self.connection = None
        dsn = cx_Oracle.makedsn(self.host, self.port, self.service_name)
        self.connect_string = self.user + '/' + self.password + '@' + dsn
        try:
            self.connection = cx_Oracle.connect(self.connect_string, mode=self.mode,
                                                threaded=True)
            self.connection.stmtcachesize = 1000
            self.connection.client_identifier = 'my_app'
            self.cursor = self.connection.cursor()
            self.idVar = self.cursor.var(cx_Oracle.NUMBER)
        except cx_Oracle.DatabaseError, exc:
            error, = exc
            self.logger.exception(
                'Exception occured while trying to create database object : %s',
                error.message)
            raise exc

    def query(self, q):
        try:
            self.cursor.execute(q)
            return self.cursor.fetchall()
        except cx_Oracle.DatabaseError, exc:
            error, = exc
            self.logger.info(
                "Error occured while trying to run query: %s, error : %s", q,
                error.message)
            return error.message

    def dml_query(self, sql):
        try:
            self.cursor.execute(sql)
            self.connection.commit()
            return 1
        except Exception as e:
            self.logger.exception(e)
            return 0

    def dml_query_with_data(self, sql, data):
        """
        Use this method to for DML SQLS :
        Inputs - Sql to be executed. Data related to that sql
        Returns - The last inserted, updated, deleted ID.
        """
        try:
            self.cursor.execute(sql, data)
            self.connection.commit()
            return 1
        except Exception as e:
            self.logger.exception(e)
            return 0

    def update_output(self, clob, job_id, flag):
        try:
            q = "Select output from my_table where job_id=%d" % job_id
            self.cursor.execute(q)
            output = self.cursor.fetchall()
            # Checking if we already have some output in the clob for that job_id
            if output[0][0] is None:
                if flag == 1:
                    self.cursor.execute("""UPDATE my_table
                                           SET OUTPUT = :p_clob
                                           ,job_status=:status WHERE job_id = :p_key""",
                                        p_clob=clob, status="COMPLETED", p_key=job_id)
                else:
                    self.cursor.execute("""UPDATE my_table
                                           SET OUTPUT = :p_clob
                                           ,job_status=:status WHERE job_id = :p_key""",
                                        p_clob=clob, status="FAILED", p_key=job_id)
            else:
                self.cursor.execute("""UPDATE my_table
                                       SET OUTPUT = OUTPUT || ',' || :p_clob
                                       WHERE job_id = :p_key""", p_clob=clob, p_key=job_id)
            self.connection.commit()
            rows_updated = self.cursor.rowcount
            return rows_updated
        except Exception as e:
            self.logger.exception(e)
            return 0

    def __del__(self):
        try:
            if self.connection is not None:
                self.connection.close()
        except Exception as e:
            self.logger.exception(
                "Exception while trying to close database connection object : %s", e)

'''
if __name__ == '__main__':
    db = Database('test', 'test', 'my_host', '1000', 'my_db', 0)
    columns = db.query('select * from my-table')
    print columns
'''
This is my database class; I create an object whenever I need a connection to the DB, and the __init__ and __del__ methods take care of constructing and destructing the object.
Should I be using DRCP / a session pool to improve performance?
What if there are too many users waiting because all the connections in DRCP are taken?
Can I have a session pool per database (for the 100 databases, each database can take at most 5 connections at a time for that read_only_user)?
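For reference, a minimal sketch of what a per-database pool could look like with cx_Oracle's SessionPool; the credentials, DSN values, and pool sizes are placeholders, and min/max would be tuned to match the 5-session limit of read_only_profile:

import cx_Oracle

def make_pool(user, password, host, port, service_name):
    dsn = cx_Oracle.makedsn(host, port, service_name)
    # one pool per target database; sessions are reused instead of reopened per request
    return cx_Oracle.SessionPool(user, password, dsn,
                                 min=1, max=5, increment=1, threaded=True)

pools = {}  # service_name -> SessionPool, one entry per target database

def run_query(service_name, sql):
    pool = pools[service_name]
    connection = pool.acquire()   # borrow a session from the pool
    try:
        cursor = connection.cursor()
        cursor.execute(sql)
        return cursor.fetchall()
    finally:
        pool.release(connection)  # hand the session back instead of closing it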