Web app: make db on first run of Python code - python

I have an aiohttp app which, when run for the first time, attempts to create an SQLite db if it doesn't exist, in a try_make_db() function. I also don't think catching an exception is a good way to do this...
Could someone give me a tip on how to make this better?
When I run the file I get a traceback:
Traceback (most recent call last):
  File "app.py", line 280, in <module>
    try_make_db()
  File "app.py", line 250, in try_make_db
    with sqlite3.connect(my_path) as conn:
sqlite3.OperationalError: unable to open database file
I don't think this has anything to do with aiohttp or sqlite; it's more of a PATH question.
This isn't the entire code but only near the bottom, here's a gist of the entire script:
def try_make_db() -> None:
    my_path = './form_data.db'
    with sqlite3.connect(my_path) as conn:
        cur = conn.cursor()
        print("Trying to connect to the db!")
        try:
            cur.execute("SELECT 1 FROM posts LIMIT 1;")
            cur.close()
            print("Good enough DB should be Ok")
        except Exception as e:
            print("Table 'posts' does not exist")
            cur.execute(
                """CREATE TABLE posts (
                    Date TEXT PRIMARY KEY,
                    starttime TEXT,
                    endtime TEXT,
                    Weekends NUMBER,
                    Weekdays NUMBER,
                    setpoint NUMBER)
                """
            )
            print("DB TABLE CREATED")
            conn.commit()

async def init_db(app: web.Application) -> AsyncIterator[None]:
    sqlite_db = 'form_data.db'
    db = await aiosqlite.connect(sqlite_db)
    db.row_factory = aiosqlite.Row
    app["DB"] = db
    yield
    await db.close()

try_make_db()
web.run_app(init_app(), host='0.0.0.0', port=8080)
Any tips greatly appreciated; not a lot of wisdom here. Would anyone have a tip on how I could modify the try_make_db function, maybe something like this:
import os.path
if os.path.exists(PATH_TO_DB):  # replace the path here
    print("Path exists")

Check the write permissions on form_data.db. Also, you don't need ./ to explicitly state the current working directory; just using the name form_data.db will create the file in whatever directory the program is run from (the current working directory).
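For what it's worth, a minimal sketch of how try_make_db() could drop the exception-based check altogether: let SQLite handle it with CREATE TABLE IF NOT EXISTS, and build the path relative to the script so the working directory no longer matters (DB_PATH is my own name, not from the original code):

import sqlite3
from pathlib import Path

# Anchor the db next to this script instead of the current working directory.
DB_PATH = Path(__file__).parent / "form_data.db"

def try_make_db() -> None:
    # sqlite3.connect() creates the file if it is missing, as long as the
    # directory exists and is writable.
    with sqlite3.connect(DB_PATH) as conn:
        conn.execute(
            """CREATE TABLE IF NOT EXISTS posts (
                Date TEXT PRIMARY KEY,
                starttime TEXT,
                endtime TEXT,
                Weekends NUMBER,
                Weekdays NUMBER,
                setpoint NUMBER)
            """
        )
        conn.commit()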

Related

Python mysql.connector cursor.execute() and connection.commit() not working in a loop

I'm trying to automate working with MySQL tables using a for-loop.
from mysql.connector import connect, Error

def main():
    try:
        with connect(host="host", user="user", password="password") as connection:
            connection.autocommit = True
            no_pk_tables_query = """
                select tab.table_schema as database_name,
                       tab.table_name
                from information_schema.tables tab
                left join information_schema.table_constraints tco
                    on tab.table_schema = tco.table_schema
                    and tab.table_name = tco.table_name
                    and tco.constraint_type = 'PRIMARY KEY'
                where tco.constraint_type is null
                    and tab.table_schema not in('mysql', 'information_schema',
                                                'performance_schema', 'sys')
                    and tab.table_type = 'BASE TABLE'
                order by tab.table_schema,
                         tab.table_name;
            """
            tables_to_cure = []
            with connection.cursor() as cursor:
                cursor.execute(no_pk_tables_query)
                for table in cursor:
                    tables_to_cure.append(table[1])
                    print(table[1])
                for s_table in tables_to_cure:
                    cure = """
                        USE mission_impossible;
                        ALTER TABLE `{}` MODIFY `ID` int(18) NOT NULL auto_increment PRIMARY KEY;
                    """.format(s_table)
                    cursor.execute(cure)
                    print("Cured {}".format(s_table))
    except Error as e:
        print(e)
    finally:
        print("End")

main()
And I get:
2014 (HY000): Commands out of sync; you can't run this command now
If I add connection.commit() inside the for-loop after cursor.execute() I'll get:
_mysql_connector.MySQLInterfaceError: Commands out of sync; you can't run this command now
Does this mean that I'll have to use new connections inside the loop instead of the cursor?
I've looked it up and found methods like fetchall() and nextset(), but they seem to do things other than simply refreshing the current cursor data.
Using connection.autocommit = True seems not to work either, as the same error occurs.
Using something like sleep() also doesn't help.
What am I doing wrong here?
Edit
Getting rid of try/except didn't help:
File "/usr/local/lib/python3.8/dist-packages/mysql/connector/connection_cext.py", line 523, in cmd_query
self._cmysql.query(query,
_mysql_connector.MySQLInterfaceError: Commands out of sync; you can't run this command now
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "db.py", line 40, in <module>
main()
File "db.py", line 36, in main
cursor.execute(cure)
File "/usr/local/lib/python3.8/dist-packages/mysql/connector/cursor_cext.py", line 269, in execute
result = self._cnx.cmd_query(stmt, raw=self._raw,
File "/usr/local/lib/python3.8/dist-packages/mysql/connector/connection_cext.py", line 528, in cmd_query
raise errors.get_mysql_exception(exc.errno, msg=exc.msg,
mysql.connector.errors.DatabaseError: 2014 (HY000): Commands out of sync; you can't run this command now
Fixed:
It seems I've finally figured it out: you need to get the results from the cursor using fetchall() instead of iterating over the cursor directly.
with connection.cursor() as cursor:
    cursor.execute(no_pk_tables_query)
    rows = cursor.fetchall()

with connection.cursor() as cursor:
    for table in rows:
        try:
            print(table[1])
            cure = """
                ALTER TABLE `{}` MODIFY `ID` int(18) NOT NULL auto_increment PRIMARY KEY;
            """.format(table[1])
            cursor.execute(cure)
            res = cursor.fetchall()
            print(res)
        except Error as e:
            print(e)
Thx everybody
Here's some sample code that shows how the "Commands out of sync" error can occur:
from mysql.connector import connect, Error
# replace asterisks in the CONFIG dictionary with your data
CONFIG = {
'user': '*',
'password': '*',
'host': '*',
'database': '*',
'autocommit': False
}
try:
with connect(**CONFIG) as conn:
try:
with conn.cursor() as cursor:
cursor.execute('select * from ips')
# cursor.fetchall()
finally:
conn.commit()
except Error as e:
print(e)
Explanation:
The code selects all rows from a table called "ips", the contents of which are irrelevant here.
Now, note that we do not attempt to get a rowset (fetchall is commented out). We then try to commit the transaction (even though no changes were made to the table).
This will induce the "Commands out of sync" error.
However, if we take out the comment line and fetch the rowset (fetchall) this problem does not arise.
Explicitly fetching the rowset is equivalent to iterating over the cursor.
If we change the autocommit parameter to True and remove the explicit commit(), we get another error: "Unread result found".
In other words, it seems that MySQL requires you to get the rowset (or iterate over the cursor) whenever you select anything!
Note that even if autocommit is enabled (True), explicit calls to commit() are permitted.
Solutions:
Either ensure that the client application iterates over the entire cursor after a SELECT, or add 'consume_results': True to the CONFIG dictionary.
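To make the second option concrete, here is a small sketch (placeholder credentials as before); with consume_results enabled, the connector consumes any unread rows before the next command, so no explicit fetchall() or iteration is needed:

from mysql.connector import connect, Error

CONFIG = {
    'user': '*',
    'password': '*',
    'host': '*',
    'database': '*',
    'autocommit': False,
    'consume_results': True,  # leftover rowsets are consumed automatically
}

try:
    with connect(**CONFIG) as conn:
        with conn.cursor() as cursor:
            cursor.execute('select * from ips')
            # no fetchall() required before the commit below
        conn.commit()
except Error as e:
    print(e)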

How to execute a MySQL query with a python script using the MySQLdb library?

I tried to modify an ETL, but I found that the old developer executes his commands directly on the connection (the ETL has been running for a few years). When I try to do it myself I get an error (because the connector expects me to do it from a cursor).
from etl.utils.logging import info
from etl.mysql.connect import db, db_name
from etl.mysql.operations import add_column_if_not_exists
from etl.utils.array import chunks
from pprint import pprint

def add_column_exclude_from_statistics():
    with db as c:
        # Create new columns where exclude_from_statistics
        info("Creating column exclude from statistics")
        c.execute("""
            UPDATE orders
            INNER JOIN contacts ON orders.id = contacts.`Contact ID`
            IF contacts.`Great Benefactor` = true OR orders.Campaign = `nuit-pour-la-mission`
            SET orders.exclude_from_statistics = 1
            ELSE
            SET orders.exclude_from_statistics = 0
            ;
        """)

def main():
    info("Table crm.orders")
    add_column_exclude_from_statistics()

if __name__ == '__main__':
    main()
But it returns that 'Connection' object has no attribute 'execute':
(venv) C:\Users\antoi\Documents\Programming\Work\data-tools>py -m etl.task.crm_orders_exclude_from_statistics
2021-06-25 17:12:44.357297 - Connecting to database hozana_data...
2021-06-25 17:12:44.365267 - Connecting to archive database hozana_archive...
2021-06-25 17:12:44.365267 - Table crm.orders
2021-06-25 17:12:44.365267 - Creating column exclude from statistics
Traceback (most recent call last):
  File "C:\Users\antoi\AppData\Local\Programs\Python\Python39\lib\runpy.py", line 197, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "C:\Users\antoi\AppData\Local\Programs\Python\Python39\lib\runpy.py", line 87, in _run_code
    exec(code, run_globals)
  File "C:\Users\antoi\Documents\Programming\Work\data-tools\etl\task\crm_orders_exclude_from_statistics.py", line 28, in <module>
    main()
  File "C:\Users\antoi\Documents\Programming\Work\data-tools\etl\task\crm_orders_exclude_from_statistics.py", line 24, in main
    add_column_exclude_from_statistics()
  File "C:\Users\antoi\Documents\Programming\Work\data-tools\etl\task\crm_orders_exclude_from_statistics.py", line 12, in add_column_exclude_from_statistics
    c.execute("""
AttributeError: 'Connection' object has no attribute 'execute'
Here is what we have in etl.mysql.connect
import os
import MySQLdb
from etl.utils.logging import info
db_host = os.environ['DB_HOST']
db_port = int(os.environ['DB_PORT'])
db_user = os.environ['DB_USER']
db_password = os.environ['DB_PASSWORD']
db_name = os.environ['DB_NAME']
db_name_archive = os.environ['DB_ARCHIVE_NAME']
info("Connecting to database {}...".format(db_name))
db = MySQLdb.connect(host=db_host,
                     port=db_port,
                     db=db_name,
                     user=db_user,
                     passwd=db_password)
It is strange to have done that, isn't it? Is it my MySQLdb library that is not up to date?
Here are the MySQL related libraries. I did not find MySQLdb:
(venv) C:\Users\antoi\Documents\Programming\Work\data-tools>pip list |findstr mysql
mysql 0.0.3
mysql-connector-python 8.0.25
mysqlclient 2.0.3
According to the documentation, you first need to create a cursor once the connection is open: the Connection object does not have an execute method, but the Cursor object does. Applied to your code sample:
from etl.utils.logging import info
from etl.mysql.connect import db, db_name
from etl.mysql.operations import add_column_if_not_exists
from etl.utils.array import chunks
from pprint import pprint

def add_column_exclude_from_statistics():
    with db as c:
        # Create new columns where exclude_from_statistics
        info("Creating column exclude from statistics")
        cursor = c.cursor()  # Get the cursor
        cursor.execute("""
            UPDATE orders
            INNER JOIN contacts ON orders.id = contacts.`Contact ID`
            IF contacts.`Great Benefactor` = true OR orders.Campaign = `nuit-pour-la-mission`
            SET orders.exclude_from_statistics = 1
            ELSE
            SET orders.exclude_from_statistics = 0
            ;
        """)

def main():
    info("Table crm.orders")
    add_column_exclude_from_statistics()

if __name__ == '__main__':
    main()
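One caveat worth adding (my own note, not part of the answer above): MySQLdb opens connections with autocommit disabled, so depending on how the with db as c context manager behaves in your MySQLdb version, the UPDATE may also need an explicit commit to persist. A hedged sketch, with a shortened placeholder statement:

def add_column_exclude_from_statistics():
    with db as c:
        info("Creating column exclude from statistics")
        cursor = c.cursor()
        cursor.execute("UPDATE orders SET exclude_from_statistics = 0;")  # placeholder statement
        c.commit()  # explicit commit; harmless if the context manager already commits on exit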

Error while transferring data from sqlite to elasticsearch

Hello to all members of the forum.
There is a script that takes data from the db.sqlite database and transfers it to Elasticsearch; it fails with an error related to the context manager, and I can't work out how to solve it.
Let me lay it out in order:
there is a db.sqlite database
there is an Elasticsearch server running locally - I checked, it works
the script takes data from the database and pushes it into Elasticsearch
Elasticsearch is prepared for the push; an index was created with the correct schema
here is the script code: https://dpaste.org/TFNd
here is the error:
Traceback (most recent call last):
  File "D:\Yandex_Midl\p1\src\script.py", line 240, in <module>
    start.load(index)
  File "D:\Yandex_Midl\p1\src\script.py", line 221, in load
    writers = self.load_writers_names()
  File "D:\Yandex_Midl\p1\src\script.py", line 153, in load_writers_names
    for writer in self.conn.execute('''SELECT DISTINCT id, name FROM writers'''):
AttributeError: '_GeneratorContextManager' object has no attribute 'execute'
As far as I know, the problem is hidden in the conn_context function,
but I can't figure out how to solve it.
Can anyone help me?
Thank you in advance.
The problem I see is in this function:
def conn_context(db_path: str):
    print("def conn_context")
    conn = sqlite3.connect(db_path)
    conn.row_factory = dict_factory
    # yield conn
    # conn.close()
    try:
        yield conn
    except:
        print("exception")
    conn.close()
Nothing hidden, really: you are assigning the return value of this function to connn in this line:
connn = conn_context(db_path)
But what you get back is a generator object because of the yield, not the connection. Yield is for another purpose. Pro tip: that yield will not raise an exception, so no try is required.
You must return conn, and close it when your script ends, not inside this function.
def conn_context(db_path: str):
    print("def conn_context")
    conn = sqlite3.connect(db_path)
    conn.row_factory = dict_factory
    return conn

connn = conn_context(db_path)
# Do your stuff
connn.close()
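Another option, guessing from the _GeneratorContextManager in the traceback that conn_context was originally decorated with contextlib.contextmanager: keep the yield, but use the function in a with statement instead of assigning its return value. A sketch under that assumption (dict_factory and db_path as defined in the original script):

import sqlite3
from contextlib import contextmanager

@contextmanager
def conn_context(db_path: str):
    conn = sqlite3.connect(db_path)
    conn.row_factory = dict_factory
    try:
        yield conn
    finally:
        conn.close()  # always closed when the with block exits

# Usage: the connection only exists inside the with block.
with conn_context(db_path) as conn:
    for writer in conn.execute('SELECT DISTINCT id, name FROM writers'):
        print(writer)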

Can't get mysql results more than once

I'm trying so hard to get my socket server python script to loop every so often to check for updates in a mysql table.
The code works the first time, no problem. On the second loop, and every loop after, it throws errors.
Things I've tried:
try/catch (for multiple loops to see if ANY work)
threading
conn.close()
cursor.close() (I hadn't committed any changes, so this threw errors of course)
However, I can put the code in a standalone file and spam running the file, and it works perfectly.
It doesn't seem to like running the SQL code in the same process or file (which I thought threading fixed, but I guess I was wrong?).
Here is the error (note the first line is the output I'm trying to print in a loop for testing):
(17, 'Default2', 1, 'uploads/2/projects/5e045c87109820.19290695.blend', '')
Exception in thread Thread-1:
Traceback (most recent call last):
  File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.7_3.7.1776.0_x64__qbz5n2kfra8p0\lib\threading.py", line 926, in _bootstrap_inner
    self.run()
  File "C:\Program Files\WindowsApps\PythonSoftwareFoundation.Python.3.7_3.7.1776.0_x64__qbz5n2kfra8p0\lib\threading.py", line 870, in run
    self._target(*self._args, **self._kwargs)
  File "D:\xampp\htdocs\urender\serverfiles\test.py", line 7, in func
    fqueue = queuedb.checkQueue()
  File "D:\xampp\htdocs\urender\serverfiles\queuedb.py", line 7, in checkQueue
    cursor = conn.cursor()
  File "C:\Users\hackn\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.7_qbz5n2kfra8p0\LocalCache\local-packages\Python37\site-packages\mysql\connector\connection.py", line 806, in cursor
    self.handle_unread_result()
  File "C:\Users\hackn\AppData\Local\Packages\PythonSoftwareFoundation.Python.3.7_qbz5n2kfra8p0\LocalCache\local-packages\Python37\site-packages\mysql\connector\connection.py", line 1059, in handle_unread_result
    raise errors.InternalError("Unread result found")
mysql.connector.errors.InternalError: Unread result found
[Finished in 6.727s]
Here is the basic code from the test.py:
import queuedb
from threading import Thread
import time

def func():
    time.sleep(5)
    fqueue = queuedb.checkQueue()
    return fqueue

func()
fqueue = queuedb.checkQueue()
print(fqueue)
Thread(target=func).start()
This is from my queuedb.py:
from dbconnect import dbconnect
import sys

def checkQueue():
    sql = "SELECT * FROM renderqueue WHERE renderer=''"
    conn = dbconnect.conn
    cursor = conn.cursor()
    cursor.execute(sql)
    result = cursor.fetchone()
    return result
And this is the dbconnect.py:
import mysql.connector
import sys
from xml.dom import minidom

def parseXML():
    try:
        xmlpath = "urender/serverfiles/dbvariables.xml"
        mydoc = minidom.parse(xmlpath)
        items = mydoc.getElementsByTagName('item')
        dbserver = items[0].attributes['dbserver'].value
        dbuser = items[1].attributes['dbuser'].value
        dbpass = items[2].attributes['dbpass'].value
        dbname = items[3].attributes['dbname'].value
        return dbserver, dbuser, dbpass, dbname
    except:
        print("Something went wrong with the XML DATA")
        sys.exit()

dbserver = parseXML()[0]
dbuser = parseXML()[1]
dbpass = parseXML()[2]
dbname = parseXML()[3]

class dbconnect:
    conn = mysql.connector.connect(host=dbserver, user=dbuser, passwd=dbpass, database=dbname)
I'm sorry for such a long post, but I hope I've explained the problem well enough and given an adequate amount of info.
hckm101,
As indicated by the exception, there are unread rows associated with your cursor.
To solve this, you have two solutions:
Use a buffered cursor, replacing your code with
conn.cursor(buffered=True)
Or, retrieve every result associated with your cursor using a for loop, with something like: for row in cursor: dosomething(row)
For more information, there is plenty of documentation available online.
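As a rough illustration of the first suggestion (not the poster's actual code), checkQueue() could look something like this; the buffered cursor fetches the whole result set up front, so no unread rows are left behind for the next call:

from dbconnect import dbconnect

def checkQueue():
    sql = "SELECT * FROM renderqueue WHERE renderer=''"
    conn = dbconnect.conn
    cursor = conn.cursor(buffered=True)  # rows are fetched immediately
    cursor.execute(sql)
    result = cursor.fetchone()
    cursor.close()  # nothing unread remains, so this no longer raises
    return result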

MySQLdb InterfaceError

I have a very weird problem with MySQLdb (the MySQL module for Python).
I have a file with queries for inserting records in tables. If I call the functions from the file, it works just fine; but when trying to call one of the functions from another file it throws me a
_mysql_exception.InterfaceError: (0, '')
I really don't get what I'm doing wrong here...
I call the function from buildDB.py:
import create
create.newFormat("HD", 0,0,0)
The function newFormat(..) is in create.py (imported):
from Database import Database

db = Database()

def newFormat(name, width=0, height=0, fps=0):
    format_query = "INSERT INTO Format (form_name, form_width, form_height, form_fps) VALUES ('"+name+"',"+str(width)+","+str(height)+","+str(fps)+");"
    db.execute(format_query)
And the class Database is the following:
import MySQLdb
from MySQLdb.constants import FIELD_TYPE

class Database():
    def __init__(self):
        server = "localhost"
        login = "seq"
        password = "seqmanager"
        database = "Sequence"
        my_conv = {FIELD_TYPE.LONG: int}
        self.conn = MySQLdb.connection(host=server, user=login, passwd=password, db=database, conv=my_conv)
        # self.cursor = self.conn.cursor()

    def close(self):
        self.conn.close()

    def execute(self, query):
        self.conn.query(query)
(I put only relevant code)
Traceback:
Z:\sequenceManager\mysql>python buildDB.py
D:\ProgramFiles\Python26\lib\site-packages\MySQLdb\__init__.py:34: DeprecationWarning: the sets module is deprecated
  from sets import ImmutableSet
INSERT INTO Format (form_name, form_width, form_height, form_fps) VALUES ('HD',0,0,0);
Traceback (most recent call last):
  File "buildDB.py", line 182, in <module>
    create.newFormat("HD")
  File "Z:\sequenceManager\mysql\create.py", line 52, in newFormat
    db.execute(format_query)
  File "Z:\sequenceManager\mysql\Database.py", line 19, in execute
    self.conn.query(query)
_mysql_exceptions.InterfaceError: (0, '')
The warning has never been a problem before so I don't think it's related.
I got this error when I was trying to use a closed connection.
Problem resolved... I was initializing the database twice. Sorry if you lost your time reading this!
I couldn't get your setup to work; it gives me the same error all the time. However, the way you connect to and query the db seems to be "non-standard".
I had better luck with this setup:
conn = MySQLdb.Connection(user="user", passwd="******",
                          db="somedb", host="localhost")
cur = conn.cursor()
cur.execute("insert into Format values (%s,%s,%s,%s);", ("hd", 0, 0, 0))
This way you can take advantage of the db module's input escaping, which is a must to mitigate SQL injection attacks.
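Applied to the original create.py and Database class, that might look roughly like this; a sketch only, assuming the standard MySQLdb.Connection/cursor API rather than the lower-level object the question uses:

import MySQLdb

class Database:
    def __init__(self):
        # MySQLdb.Connection / MySQLdb.connect() returns an object with cursor().
        self.conn = MySQLdb.Connection(host="localhost", user="seq",
                                       passwd="seqmanager", db="Sequence")

    def execute(self, query, params=()):
        cur = self.conn.cursor()
        cur.execute(query, params)  # the driver escapes params for you
        self.conn.commit()
        cur.close()

    def close(self):
        self.conn.close()

def newFormat(name, width=0, height=0, fps=0):
    db = Database()
    db.execute(
        "INSERT INTO Format (form_name, form_width, form_height, form_fps) "
        "VALUES (%s, %s, %s, %s);",
        (name, width, height, fps),
    )
    db.close()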
