How to use Python to query a database in parallel

I have two functions which I use to query a database. Assuming two separate queries, how can I run them in parallel against the same database, and wait for both results to return before continuing with the rest of the code?
def query1(param1, param2):
    result = None
    logging.info("Connecting to database...")
    try:
        conn = connect(host=host, port=port, database=db)
        curs = conn.cursor()
        curs.execute(query)
        result = curs.fetchall()  # fetch the rows before closing the cursor
        curs.close()
        conn.close()
    except Exception as e:
        logging.error("Unable to access database %s" % str(e))
    return result

def query2(param1, param2):
    result = None
    logging.info("Connecting to database...")
    try:
        conn = connect(host=host, port=port, database=db)
        curs = conn.cursor()
        curs.execute(query)
        result = curs.fetchall()  # fetch the rows before closing the cursor
        curs.close()
        conn.close()
    except Exception as e:
        logging.error("Unable to access database %s" % str(e))
    return result

Here is multi-threaded code that does what you're trying to accomplish:

import logging
from time import sleep
from threading import Thread, Lock

class DatabaseWorker(Thread):
    __lock = Lock()

    def __init__(self, db, query, result_queue):
        Thread.__init__(self)
        self.db = db
        self.query = query
        self.result_queue = result_queue

    def run(self):
        result = None
        logging.info("Connecting to database...")
        try:
            # connect/host/port as in the question
            conn = connect(host=host, port=port, database=self.db)
            curs = conn.cursor()
            curs.execute(self.query)
            result = curs.fetchall()  # fetch the rows before closing the cursor
            curs.close()
            conn.close()
        except Exception as e:
            logging.error("Unable to access database %s" % str(e))
        self.result_queue.append(result)

delay = 1
result_queue = []
worker1 = DatabaseWorker("db1", "select something from sometable", result_queue)
worker2 = DatabaseWorker("db1", "select something from othertable", result_queue)
worker1.start()
worker2.start()

# Wait for both jobs to be done
while len(result_queue) < 2:
    sleep(delay)
worker1.join()
worker2.join()
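
A more compact alternative (not from the original answer) is the standard library's concurrent.futures module. A minimal sketch, assuming query1 and query2 from the question return their fetched rows and that param1/param2 are defined:

from concurrent.futures import ThreadPoolExecutor

with ThreadPoolExecutor(max_workers=2) as executor:
    future1 = executor.submit(query1, param1, param2)
    future2 = executor.submit(query2, param1, param2)
    # result() blocks until the corresponding query has finished
    result1 = future1.result()
    result2 = future2.result()

Leaving the with block also waits for all submitted work, so both results are guaranteed to be in hand before the code continues.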


2013, 'Lost connection to MySQL server during query'

This is my MySQLConnector code.

import logging
import pymysql

logger = logging.getLogger(__name__)

class MySQLConnector:
    def __init__(self, config):
        self.connection = None
        self._connect(config=config)

    def _connect(self, config):
        config["charset"] = "utf8mb4"
        config["cursorclass"] = pymysql.cursors.DictCursor
        self.connection = pymysql.connect(**config)

    def read(self, query, params=None):
        result = None
        try:
            with self.connection.cursor() as cursor:
                cursor.execute(query, params)
                result = cursor.fetchall()
            self.connection.commit()
        except Exception as e:
            logger.error(e)
        return result

And I use it like this:

import time

connector = MySQLConnector(config=config)
while True:
    query = "SELECT * FROM my_table"
    print(connector.read(query=query))
    time.sleep(30)
This works well, but after a few hours the process raises the error (2013, 'Lost connection to MySQL server during query'), and I can't find the reason.
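
One common cause is the MySQL server closing connections that have been idle longer than its wait_timeout. A minimal sketch of a common fix, assuming the PyMySQL-based read() method above: ping the server before each query so a dropped connection is transparently reopened.

    def read(self, query, params=None):
        result = None
        try:
            # PyMySQL reopens the connection if the server has dropped it
            self.connection.ping(reconnect=True)
            with self.connection.cursor() as cursor:
                cursor.execute(query, params)
                result = cursor.fetchall()
            self.connection.commit()
        except Exception as e:
            logger.error(e)
        return result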

Having trouble autoincrementing in an API

I built a to-do list API with Flask and SQLite, and now I'm trying to use AUTOINCREMENT to increment the ids for the tasks. However, I am getting an error ("Error: NOT NULL constraint failed: incomplete.id") when I try to add something to the list. I'm not sure why; I looked at the SQLite documentation and I seem to be following it. I even tried reformatting the CREATE TABLE statements. I'm not sure what else to do, and I'd really appreciate some guidance/advice/help. Thanks!
Here is my app file (it imports helper.py, shown further down):
import helper
from flask import Flask, request, jsonify, Response
import json

app = Flask(__name__)

@app.route('/')
def hello_world():
    return 'Hello World!'

@app.route('/tasks/new', methods=['PUT'])
def add_task():
    # global idCount
    # idCount = idCount + 1
    # get item from the request body; request parses the HTTP body data,
    # Response returns a JSON response to the client
    req_data = request.get_json()
    task = req_data['task']
    # add task to the list
    res_data = helper.add_to_incomplete(task)
    # return error if task can't be added
    if res_data is None:
        response = Response("{'error': 'Task not added - " + task + "'}", mimetype='application/json')
        return response
    response = Response(json.dumps(res_data), mimetype='application/json')
    return response

@app.route('/tasks/all', methods=["GET"])
def get_all_items():
    res_data = helper.get_all_completes(), helper.get_all_incompletes()
    response = Response(json.dumps(res_data), mimetype='application/json')
    return response

@app.route('/tasks/complete', methods=["POST"])
def complete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.add_to_complete(inputId)
    # find matching task to input id
    return "completed task" + inputId

@app.route('/tasks/incomplete', methods=["PATCH"])
def uncomplete_task():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.uncomplete(inputId)
    # find matching task to input id
    return "un-completed task" + inputId

@app.route('/tasks/remove', methods=["DELETE"])
def delete():
    req_data = request.get_json()
    inputId = req_data['id']
    res_data = helper.delete_task(inputId)
    if res_data is None:
        response = Response("{'error': 'Error deleting task - " + inputId + "'}", status=400, mimetype='application/json')
        return response
    return "deleted task id " + inputId

@app.route('/tasks/empty', methods=["EMPTY"])
def delete_all():
    helper.empty()
    return "you deleted everything"
Here is my helper.py:
import sqlite3
import random  # for ids, because users don't set them

DB_PATH = './todo.db'

# connect to database
conn = sqlite3.connect(DB_PATH)
c = conn.cursor()
c.execute("CREATE TABLE IF NOT EXISTS complete (id INTEGER PRIMARY KEY, task TEXT NOT NULL);")
c.execute("CREATE TABLE IF NOT EXISTS incomplete (id INTEGER PRIMARY KEY, task TEXT NOT NULL);")
# save the change
conn.commit()

def add_to_incomplete(task):
    try:
        # id = str(random.randrange(100,999))
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('insert into incomplete(task) values(?)', (task,))
        conn.commit()
        return {"id": c.lastrowid}  # id assigned by sqlite
    except Exception as e:
        print('Error: ', e)
        return None

def add_to_complete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select task from incomplete where id=?', (inputId,))
        tasks = c.fetchone()[0]
        c.execute('insert into complete values(?,?)', (inputId, tasks))
        delete_task(inputId)
        conn.commit()
        return {"id": inputId}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_completes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from complete')
        rows = c.fetchall()
        conn.commit()
        return {"complete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def get_all_incompletes():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select * from incomplete')
        rows = c.fetchall()
        conn.commit()
        return {"incomplete": rows}
    except Exception as e:
        print('Error: ', e)
        return None

def uncomplete(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('select task from complete where id=?', (inputId,))
        tasks = c.fetchone()[0]
        c.execute('insert into incomplete values(?,?)', (inputId, tasks))
        delete_task(inputId)
        conn.commit()
        return {"id": inputId}
    except Exception as e:
        print('Error: ', e)
        return None

def delete_task(inputId):
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('delete from complete where id=?', (inputId,))
        c.execute('delete from incomplete where id=?', (inputId,))
        conn.commit()
        return {"id": inputId}
    except Exception as e:
        print('Error: ', e)
        return None

def empty():
    try:
        conn = sqlite3.connect(DB_PATH)
        c = conn.cursor()
        c.execute('delete from complete')
        c.execute('delete from incomplete')
        conn.commit()
        return "you deleted everything mwahaha"
    except Exception as e:
        print('Error: ', e)
        return None
I would suggest changing your table creation code to SQLite's autoincrement form (note that SQLite uses INTEGER PRIMARY KEY AUTOINCREMENT, not MySQL's auto_increment):

create table if not exists complete
(
    id integer primary key autoincrement,
    task text not null
);

However, a better option is to use SQLAlchemy.
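
A minimal sketch of that option, with names like Incomplete and todo.db mirroring the question; an Integer primary key autoincrements by default on SQLite:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Incomplete(Base):
    __tablename__ = 'incomplete'
    id = Column(Integer, primary_key=True)  # autoincrements by default
    task = Column(String, nullable=False)

engine = create_engine('sqlite:///todo.db')
Base.metadata.create_all(engine)

with Session(engine) as session:
    item = Incomplete(task='write docs')
    session.add(item)
    session.commit()
    print(item.id)  # id assigned by the database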

Retrieving data returns none

I am trying to retrieve all the rows of a MySQL table.
import mysql.connector
import sys

user, pw, host, db = 'root', '12345', '127.0.0.1', 'CA2Database'
cnx = mysql.connector.connect(user=user, password=pw, host=host, database=db)
cursor = cnx.cursor()
try:
    print(cursor.execute('SELECT * FROM student'))
except mysql.connector.Error as err:
    print(err)
    print("Error Code:", err.errno)
    print("SQLSTATE", err.sqlstate)
    print("Message", err.msg)
finally:
    print()

The output is None. There is data in the table.
https://imgur.com/a/APUuZot
You were probably missing some necessary calls: cursor.execute() itself returns None, so you have to fetch the results from the cursor afterwards. This should do it:

import mysql.connector

user, pw, host, db = 'root', '12345', '127.0.0.1', 'CA2Database'
cnx = mysql.connector.connect(user=user, password=pw, host=host, database=db)
cursor = cnx.cursor()
sql_query = 'SELECT * FROM student;'
cursor.execute(sql_query)
data = cursor.fetchall()  # the rows come from the cursor, not from execute()
print(data)
cnx.commit()
cursor.close()

Connection management when working with a database in Python

I have a class that handles db operations, like below:

class DepartmentOperations(DatabaseOperations):
    def __init__(self):
        try:
            self._connection = Database.create_connection()
            self._cursor = self._connection.cursor()
            self.isactive = True
        except ConnectionException as ex:
            print(ex.args)

    def get_id(self, department_name):
        if self.isactive:
            try:
                self._cursor.execute("select BolumId from BOLUMLER where BolumAdi = %s", department_name)
                row = self._cursor.fetchone()
                if row is not None:
                    return row[0]
                else:
                    return 0
            except:
                raise DbException("An error occurred while fetching the record...")
            finally:
                self._cursor.close()
                self._connection.close()
                self.isactive = False
        else:
            try:
                self._connection = Database.create_connection()
                self._cursor = self._connection.cursor()
                self.isactive = True
            except ConnectionException as ex:
                print(ex.args)
            try:
                self._cursor.execute("select BolumId from BOLUMLER where BolumAdi = %s", department_name)
                row = self._cursor.fetchone()
                if row is not None:
                    return row[0]
                else:
                    return 0
            except:
                raise DbException("An error occurred while fetching the record...")
            finally:
                self._cursor.close()
                self._connection.close()
                self.isactive = False

    def add(self, department_name):
        if self.isactive:
            try:
                self._cursor.execute("insert into BOLUMLER values (%s)", (department_name,))
                self._connection.commit()
            except:
                raise DbException("An error occurred while saving the data.")
            finally:
                self._cursor.close()
                self._connection.close()
                self.isactive = False
        else:
            try:
                self._connection = Database.create_connection()
                self._cursor = self._connection.cursor()
                self.isactive = True
            except ConnectionException as ex:
                print(ex.args)
            try:
                self._cursor.execute("insert into BOLUMLER values (%s)", (department_name,))
                self._connection.commit()
            except:
                raise DbException("An error occurred while saving the data.")
            finally:
                self._cursor.close()
                self._connection.close()
                self.isactive = False
When I instantiate this class and use it, it works the first time but not the second, because, as you can see, I close the connection in the finally block. If I delete the finally blocks the methods work fine, but then I never close the connection. How can I manage connections?
The best way, if you are in a web application, is not to keep the connection open; instead, use a with statement, like this:

with pymssql.connect(server, user, password, "tempdb") as conn:
    with conn.cursor(as_dict=True) as cursor:
        cursor.execute('SELECT * FROM persons WHERE salesrep=%s', 'John Doe')
        for row in cursor:
            print("ID=%d, Name=%s" % (row['id'], row['name']))

This way, the connection is opened and closed by the context.
You can also check whether the connection is still active: use try/except, and if the db connection is closed, reopen it.
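
A minimal sketch of that reopen-on-failure idea, reusing the question's Database.create_connection() factory; the "SELECT 1" probe is an assumption, any cheap statement works:

    def get_connection(self):
        try:
            # a cheap probe fails fast if the connection has gone away
            cur = self._connection.cursor()
            cur.execute("SELECT 1")
            cur.close()
        except Exception:
            # reopen and remember the fresh connection
            self._connection = Database.create_connection()
        return self._connection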
You can use a custom pool:

import contextlib
import multiprocessing
import queue

def pool(ctor, limit=None):
    local_pool = multiprocessing.Queue()
    n = multiprocessing.Value('i', 0)

    @contextlib.contextmanager
    def pooled(ctor=ctor, lpool=local_pool, n=n):
        # block iff at limit
        try:
            i = lpool.get(limit and n.value >= limit)
        except queue.Empty:
            n.value += 1
            i = ctor()
        yield i
        lpool.put(i)

    return pooled
For example (do_something borrows a pooled connection instead of closing a shared one):

def do_something(connection):
    with connection.cursor() as cursor:
        # Create a new record
        sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
        cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
    connection.commit()
    with connection.cursor() as cursor:
        # Read a single record
        sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
        cursor.execute(sql, ('webmaster@python.org',))
        result = cursor.fetchone()
        print(result)

and then:

my_pool = pool(lambda: pymysql.connect(host=host, user=user, password=password, db=db))
with my_pool() as connection:
    do_something(connection)
PYDAL:
I recommend using pyDAL; extended documentation here.
General usage looks something like this (it is almost the same for every supported database):

from pydal import DAL

db = DAL('mysql://username:password@localhost/test', pool_size=150)

class DALHandler(object):
    def __init__(self, db):
        self.db = db

    def on_start(self):
        self.db._adapter.reconnect()

    def on_success(self):
        self.db.commit()

    def on_failure(self):
        self.db.rollback()

    def on_end(self):
        self.db._adapter.close()
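
For completeness, a minimal pyDAL usage sketch with a hypothetical department table (pyDAL defines the schema and manages the pooled connections itself):

from pydal import DAL, Field

# pyDAL opens connections lazily and recycles them through its pool
db = DAL('sqlite://storage.db', pool_size=10)
db.define_table('department', Field('name'))

db.department.insert(name='Engineering')
db.commit()

rows = db(db.department.name == 'Engineering').select()
for row in rows:
    print(row.id, row.name)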

How to use "INSERT" in psycopg2 connection pooling?

I use psycopg2 to connect to PostgreSQL from Python, and I want to use connection pooling.
I don't know what I should do instead of commit() and rollback() when I execute an INSERT query.
from contextlib import contextmanager
from psycopg2 import pool

db = pool.SimpleConnectionPool(1, 10, host=conf_hostname, database=conf_dbname,
                               user=conf_dbuser, password=conf_dbpass, port=conf_dbport)

# Get Cursor
@contextmanager
def get_cursor():
    con = db.getconn()
    try:
        yield con.cursor()
    finally:
        db.putconn(con)

with get_cursor() as cursor:
    cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
    id = cursor.fetchone()

I don't get the id of the inserted record without commit().
UPDATE: I cannot test the code, but here are some ideas.

You do the commit on the connection, not on db:

# Get Connection
@contextmanager
def get_connection():
    con = db.getconn()
    try:
        yield con
    finally:
        db.putconn(con)

with get_connection() as con:
    cursor = con.cursor()
    cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
    id = cursor.fetchone()
    con.commit()

or:

# Get Cursor (commits after the with block finishes without an error)
@contextmanager
def get_cursor():
    con = db.getconn()
    try:
        yield con.cursor()
        con.commit()
    finally:
        db.putconn(con)

with get_cursor() as cursor:
    cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
    id = cursor.fetchone()
Connection pooling exists because creating a new connection to a db can be expensive, not in order to avoid commits or rollbacks. So you can commit your data without any issue; committing will not destroy the connection.
Here is my working example:

from contextlib import contextmanager

db = pool.SimpleConnectionPool(1, 10, host=conf_hostname, database=conf_dbname,
                               user=conf_dbuser, password=conf_dbpass, port=conf_dbport)

@contextmanager
def get_connection():
    con = db.getconn()
    try:
        yield con
    finally:
        db.putconn(con)

def write_to_db():
    with get_connection() as conn:
        try:
            cursor = conn.cursor()
            cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
            id = cursor.fetchone()
            cursor.close()
            conn.commit()
        except:
            conn.rollback()
I think this will be a little more pythonic:

from contextlib import contextmanager

import psycopg2
from psycopg2 import pool

db_pool = pool.SimpleConnectionPool(1, 10,
                                    host=CONF.db_host,
                                    database=CONF.db_name,
                                    user=CONF.db_user,
                                    password=CONF.db_password,
                                    port=CONF.db_port)

@contextmanager
def db():
    con = db_pool.getconn()
    cur = con.cursor()
    try:
        yield con, cur
    finally:
        cur.close()
        db_pool.putconn(con)

if __name__ == '__main__':
    with db() as (connection, cursor):
        try:
            cursor.execute("""INSERT INTO table (fields)
                              VALUES (values) RETURNING id""")
            my_id = cursor.fetchone()
            rowcount = cursor.rowcount
            if rowcount == 1:
                connection.commit()
            else:
                connection.rollback()
        except psycopg2.Error as error:
            print('Database error:', error)
        except Exception as ex:
            print('General error:', ex)
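
As a footnote, psycopg2 connections can themselves be used as context managers: entering "with con:" starts a transaction block that commits on success and rolls back on an exception, without closing the connection, which combines naturally with a pool. A minimal sketch, reusing the db_pool defined above:

def insert_with_pool():
    con = db_pool.getconn()
    try:
        # commits on success, rolls back on exception; the connection stays open
        with con:
            with con.cursor() as cur:
                cur.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
                return cur.fetchone()
    finally:
        db_pool.putconn(con)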
