multithreading in mysql and python

I am learning Python and MySQL, and I want to use multithreading to write to a MySQL database.
When I run multiple threads I get an error like "connection not found". With a single thread it works fine, but it is slow (about 40 rows per second).
Please help me get this working, and if I am doing it wrong, please let me know if there is a better way to do it. Thanks.
import mysql.connector
from queue import Queue
from threading import Thread

mydb = mysql.connector.connect(
    host="localhost",
    user="root",
    password="",
    database="list"
)

def do_stuff(q):
    while True:
        mycursor = mydb.cursor()
        a = q.get()
        sql = "INSERT INTO demo1 (id, name, price, tmp) VALUES (%s, %s, %s, %s)"
        val = (a[0], a[1], a[3], a[2])
        mycursor.execute(sql, val)
        mydb.commit()
        q.task_done()

q = Queue(maxsize=0)
num_threads = 1  # if I try more than 1 it throws "IndexError: bytearray index out of range"
for i in range(num_threads):
    worker = Thread(target=do_stuff, args=(q,))
    worker.setDaemon(True)
    worker.start()

def strt():
    mycursor = mydb.cursor()
    sql = "SELECT * FROM demo ORDER BY id"
    mycursor.execute(sql)
    myresult = mycursor.fetchall()
    for x in myresult:
        q.put(x)

strt()

Hi, in order to run the transactions you have to open a connection for each thread. A single connection can only be used by one thread at a time, so while one thread holds it the others cannot connect, which is why your multi-threaded version fails. The usual way to manage this is a connection pool: each thread takes a free connection from the pool and returns it when done, so pooling does not create a bottleneck.
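A minimal sketch of that idea applied to the code in the question, using the connection pool built into mysql-connector-python (the pool name and size here are arbitrary): each worker thread takes its own connection from a shared pool instead of sharing the module-level mydb.

import mysql.connector.pooling
from queue import Queue
from threading import Thread

# Shared pool; each worker thread borrows its own connection from it.
pool = mysql.connector.pooling.MySQLConnectionPool(
    pool_name="listpool", pool_size=4,
    host="localhost", user="root", password="", database="list")

def do_stuff(q):
    conn = pool.get_connection()  # this thread's own connection
    cur = conn.cursor()
    while True:
        a = q.get()
        cur.execute(
            "INSERT INTO demo1 (id, name, price, tmp) VALUES (%s, %s, %s, %s)",
            (a[0], a[1], a[3], a[2]))
        conn.commit()
        q.task_done()

The rest of the script (creating the Queue, starting the threads, and feeding rows from strt()) stays the same.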

Related

Python MySQL Connector Timeout

I am inserting records into a MySQL database using Python's MySQL Connector. The process works around 98% of the time, but I get intermittent timeout responses from the server. Other times the script just hangs and nothing happens, which is the worst possible situation.
Can I time out or kill the process using threading?
Can I set a timeout on the execute or commit statement?
import mysql.connector

try:
    mydb = mysql.connector.connect(host="...", user="...", password="...", database="...")
    mycursor = mydb.cursor()
except Exception as e:
    print(e)

# Seems to always work up to this point
sql = "INSERT INTO test (name,tagline,location,experience) VALUES (%s, %s, %s, %s)"
val = ('test', 'test', 'test', 'test')
try:
    mycursor.execute(sql, val)
    mydb.commit()
except Exception as e:
    print(e)

mycursor.close()
mydb.close()
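No answer was posted here, but on the threading question: a Python thread cannot be killed, only abandoned, so threading lets you detect a hang rather than stop it. Below is a minimal sketch, reusing mycursor, sql, and val from the snippet above; the run_with_timeout helper is hypothetical. It may also be worth looking at the connection_timeout argument of mysql.connector.connect(), which sets a socket-level timeout in seconds.

import threading

def run_with_timeout(fn, timeout_sec):
    # Run fn in a daemon thread and wait up to timeout_sec for it to finish.
    # Python threads cannot be killed; a stuck thread is only abandoned.
    t = threading.Thread(target=fn, daemon=True)
    t.start()
    t.join(timeout_sec)
    return not t.is_alive()

def do_insert():
    mycursor.execute(sql, val)
    mydb.commit()

if not run_with_timeout(do_insert, 10):
    print("insert timed out; the connection may be stuck")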

Python program hangs when using multi-thread select sql query with MySQL db

I have a program like this:
import multiprocessing as mp
import mysql.connector

def runBlockHash(i):
    # Some things here
    sql = 'SELECT * from abc where zzz=xyz'
    mycursor.execute(sql, hashTree)
    results = mycursor.fetchall()
    # Something more here

print("Connecting to SQL DB")
mydb = mysql.connector.connect(
    host=args['hostname'],
    user=args['username'],
    passwd=args['password'],
    database=args['database']
)
mycursor = mydb.cursor()

p = mp.Pool(2)  # define number of processes to use
p.map(runBlockHash, range(len(files_to_process)))  # call the function
p.close()  # close the pool
p.join()
The problem I am facing is that when I use more than one process, not even a single SQL query is executed; the Python program just freezes.
I only want to run SELECT statements; I don't need to insert into or update the database.
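No answer was posted for this one, but the fix is consistent with the answer at the top of the page: a connection object cannot be shared across worker processes, so each process needs its own. A sketch under that assumption, using Pool's initializer to open one connection per worker (connection details and the query are placeholders):

import multiprocessing as mp
import mysql.connector

worker_db = None  # set once per worker process by init_worker

def init_worker():
    global worker_db
    worker_db = mysql.connector.connect(
        host="localhost", user="root", passwd="", database="mydb")

def runBlockHash(i):
    cur = worker_db.cursor()
    cur.execute("SELECT * FROM abc WHERE zzz = %s", (i,))  # hypothetical query
    rows = cur.fetchall()
    cur.close()
    return len(rows)

if __name__ == "__main__":
    with mp.Pool(2, initializer=init_worker) as p:
        print(p.map(runBlockHash, range(10)))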

Python MySQL doesn't work the second time

I am receiving JSON data (from another Python script) to insert into a MySQL database. The code works fine the first time, but the second time I get this error:
raise errors.OperationalError("MySQL Connection not available.")
mysql.connector.errors.OperationalError: MySQL Connection not available.
For troubleshooting I always send the same data, but it still fails the second time.
Based on suggestions found on forums, I also tried placing cur = mydb.cursor() in different places, but I have never been able to get this code to work the second time.
Here is my code:
import mysql.connector
import json

mydb = mysql.connector.connect(
    host="localhost",
    user="***",
    passwd="***",
    database="***"
)

def DATA_REPARTITION(Topic, jsonData):
    if Topic == "test":
        #print("Start")
        INSERT_DEBIT(jsonData)

def INSERT_DEBIT(jsonData):
    cur = mydb.cursor()
    # Read json from MQTT
    print("Start read data to insert")
    json_Dict = json.loads(jsonData)
    debit = json_Dict['debit']
    print("I send")
    print(debit)
    # Insert into DB table
    sql = "INSERT INTO debit (data_debit) VALUES (%s)"
    val = (debit,)
    cur.execute(sql, val)
    mydb.commit()
    print(cur.rowcount, "record inserted.")
    cur.close()
    mydb.close()
Thanks for your help!
You only open your database connection once, at the start of the script, and you close that connection after making the first insert. Hence, the second and subsequent inserts fail. You should create a helper function that returns a database connection, and call it each time you want to do DML:
def getConnection():
    mydb = mysql.connector.connect(
        host="localhost",
        user="***",
        passwd="***",
        database="***")
    return mydb

def INSERT_DEBIT(jsonData):
    mydb = getConnection()
    cur = mydb.cursor()
    # Read json from MQTT
    # rest of your code here...
    cur.close()
    mydb.close()
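For completeness, here is a sketch of the same pattern with the insert from the question filled in, using try/finally so the connection is closed even if the insert raises:

def INSERT_DEBIT(jsonData):
    mydb = getConnection()
    cur = mydb.cursor()
    try:
        json_Dict = json.loads(jsonData)
        debit = json_Dict['debit']
        cur.execute("INSERT INTO debit (data_debit) VALUES (%s)", (debit,))
        mydb.commit()
        print(cur.rowcount, "record inserted.")
    finally:
        cur.close()
        mydb.close()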

connect to mysql in a loop

I have to connect to a MySQL server and fetch data forever, so I see two ways to do it.
1) Connect to MySQL once, then fetch data in a while loop:
from time import sleep
import mysql.connector

conn = mysql.connector.connect(user='root', password='password', host='localhost',
                               database='db', charset='utf8', autocommit=True)
cursor = conn.cursor(buffered=True)
while True:
    cursor.execute("statements")
    sqlData = cursor.fetchone()
    print(sqlData)
    sleep(0.5)
This works well, but if the script crashes due to a MySQL connection problem, it stays down.
2) Connect to MySQL inside the loop:
while True:
    try:
        conn = mysql.connector.connect(user='root', password='password', host='localhost',
                                       database='db', charset='utf8', autocommit=True)
        cursor = conn.cursor(buffered=True)
        cursor.execute("statements")
        sqlData = cursor.fetchone()
        print(sqlData)
        cursor.close()
        conn.close()
        sleep(0.5)
    except:
        print("recoverable error..")
Both versions work, but my question is: which one is better?
Of these two, the better way is to use a single connection but create a new cursor for each statement, because creating a new connection takes time while creating a new cursor is fast. You may update the code as follows (note that the connection should only be closed once the loop is finished):
conn = mysql.connector.connect(user='root', password='password', host='localhost',
                               database='db', charset='utf8', autocommit=True)
try:
    while True:
        try:
            cursor = conn.cursor(buffered=True)
            cursor.execute("statements")
            sqlData = cursor.fetchone()
            print(sqlData)
        except Exception:  # catch the exception raised on connection loss
            conn = mysql.connector.connect(user='root', password='password', host='localhost',
                                           database='db', charset='utf8', autocommit=True)
            cursor = conn.cursor(buffered=True)
        finally:
            cursor.close()  # close the cursor each iteration
        sleep(0.5)
finally:
    conn.close()  # close the connection when the loop ends
Also read Defining Clean-up Actions regarding the use of try/finally blocks.
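As a side note not raised in the original answer: mysql-connector-python also provides MySQLConnection.ping(), which can transparently re-establish a dropped connection. A sketch combining it with the single-connection approach above:

from time import sleep
import mysql.connector

conn = mysql.connector.connect(user='root', password='password', host='localhost',
                               database='db', charset='utf8', autocommit=True)
while True:
    conn.ping(reconnect=True, attempts=3, delay=2)  # reconnect if the link has dropped
    cursor = conn.cursor(buffered=True)
    cursor.execute("statements")
    print(cursor.fetchone())
    cursor.close()
    sleep(0.5)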

SQLAlchemy memory hog on select statement

As per the SQLAlchemy docs, select statements are treated as iterables in for loops, so a select statement that returns a massive number of rows should not use excessive memory.
I am finding that the following statement on a MySQL table:
for row in my_connections.execute(MyTable.__table__.select()):
    yield row
does not seem to behave that way: I overflow available memory and begin thrashing before the first row is yielded. What am I doing wrong?
The basic MySQLdb cursor fetches the entire query result at once from the server, which can consume a lot of memory and time. Use MySQLdb.cursors.SSCursor when you want to make a huge query and pull results from the server one at a time. Therefore, try passing connect_args={'cursorclass': MySQLdb.cursors.SSCursor} when creating the engine:
from sqlalchemy import create_engine, MetaData
import MySQLdb.cursors

engine = create_engine('mysql://root:zenoss@localhost/e2',
                       connect_args={'cursorclass': MySQLdb.cursors.SSCursor})
meta = MetaData(engine, reflect=True)
conn = engine.connect()
s = MyTable.__table__.select()  # the select statement from the question
rs = s.execution_options(stream_results=True).execute()
See http://www.sqlalchemy.org/trac/ticket/1089
Note that using SSCursor locks the table until the fetch is complete. This affects other cursors using the same connection: two cursors from the same connection cannot read from the table concurrently. However, cursors from different connections can read from the same table concurrently.
Here is some code demonstrating the problem:
import MySQLdb
import MySQLdb.cursors as cursors
import threading
import logging
import config

logger = logging.getLogger(__name__)
query = 'SELECT * FROM huge_table LIMIT 200'

def oursql_conn():
    import oursql
    conn = oursql.connect(
        host=config.HOST, user=config.USER, passwd=config.PASS,
        db=config.MYDB)
    return conn

def mysqldb_conn():
    conn = MySQLdb.connect(
        host=config.HOST, user=config.USER,
        passwd=config.PASS, db=config.MYDB,
        cursorclass=cursors.SSCursor)
    return conn

def two_cursors_one_conn():
    """Two SSCursors can not use one connection concurrently"""
    def worker(conn):
        cursor = conn.cursor()
        cursor.execute(query)
        for row in cursor:
            logger.info(row)
    conn = mysqldb_conn()
    threads = [threading.Thread(target=worker, args=(conn, ))
               for n in range(2)]
    for t in threads:
        t.daemon = True
        t.start()
    # Second thread may hang or raise OperationalError:
    # File "/usr/lib/pymodules/python2.7/MySQLdb/cursors.py", line 289, in _fetch_row
    #     return self._result.fetch_row(size, self._fetch_type)
    # OperationalError: (2013, 'Lost connection to MySQL server during query')
    for t in threads:
        t.join()

def two_cursors_two_conn():
    """Two SSCursors from independent connections can use the same table concurrently"""
    def worker():
        conn = mysqldb_conn()
        cursor = conn.cursor()
        cursor.execute(query)
        for row in cursor:
            logger.info(row)
    threads = [threading.Thread(target=worker) for n in range(2)]
    for t in threads:
        t.daemon = True
        t.start()
    for t in threads:
        t.join()

logging.basicConfig(level=logging.DEBUG,
                    format='[%(asctime)s %(threadName)s] %(message)s',
                    datefmt='%H:%M:%S')
two_cursors_one_conn()
two_cursors_two_conn()
Note that oursql is an alternative set of MySQL bindings for Python. oursql cursors are true server-side cursors which fetch rows lazily by default. With oursql installed, if you change
conn = mysqldb_conn()
to
conn = oursql_conn()
then two_cursors_one_conn() runs without hanging or raising an exception.
