Python database request not updating

I am pulling data from a database in a Python script. It works on start-up but does not update when I change the value within the database.
The script initially connects to the database, then selects all the contents where id is equal to 1 (this row has a number of columns which I intend to use).
It then updates the global variable garageHeating with the contents of the garage column; let's say this is equal to 0 at start-up.
The while loop then prints out this variable, which on start will be 0.
If I then change the column garage within the database to 10, the Python script stays at 0. It does not update the global variable, which I thought it should do.
My script is below:
import mysql.connector as mariadb

mariadb_connection = mariadb.connect(
    host="localhost",
    user="garage",
    passwd="*******",
    database="mydb"
)
cursor = mariadb_connection.cursor()

garageHeating = 0

def readDbHeating():
    global garageHeating
    result = []
    try:
        cursor.execute("SELECT * FROM heating WHERE id = '1'")
        for reading in cursor.fetchall():
            result.append(reading)
        garageHeating = result[0][8]
    except () as e:
        print (e)

while 1:
    readDbHeating()
    print garageHeating
I have edited the code above, shown below, to declare the global variable outside the function and also to print out the result within the try block. I am still getting the same issue: after the first pull of data from the database the script keeps displaying the first set of data and does not update to the new data if I change the database records.
import mysql.connector as mariadb

mariadb_connection = mariadb.connect(
    host="localhost",
    user="garage",
    passwd="*******",
    database="mydb"
)
cursor = mariadb_connection.cursor()

global garageHeating

def readDbHeating():
    result = []
    try:
        cursor.execute("SELECT * FROM heating WHERE id = '1'")
        for reading in cursor.fetchall():
            result.append(reading)
        garageHeating = result[0][8]
        print garageHeating
        print result
    except () as e:
        print (e)

while 1:
    readDbHeating()
    #print garageHeating

With the default REPEATABLE READ isolation level, a connection that never commits keeps reading from the same snapshot, so repeated SELECTs return stale data. You must either call commit on the connection before executing the query or set autocommit on the connection object.
Committing before the query:
def readDbHeating():
    global garageHeating
    result = []
    try:
        # commit
        mariadb_connection.commit()
        cursor.execute("SELECT * FROM heating WHERE id = '1'")
        for reading in cursor.fetchall():
            result.append(reading)
        garageHeating = result[0][8]
    except () as e:
        print (e)
Autocommit when creating a connection:
mariadb_connection = mariadb.connect(
    host="localhost",
    user="garage",
    passwd="*******",
    database="mydb",
    # Auto commit
    autocommit=True
)
Autocommit after connection creation (with mysql.connector, autocommit is a property, not a method):
mariadb_connection.autocommit = True

I seem to have it working. I had to open the database connection and then close the connection within the function, as shown below. This is different from what I have done in the past with MySQL, but at the moment at least it now works.
import mysql.connector as mariadb
import time

garageHeating = 0

def readDbHeating():
    mariadb_connection = mariadb.connect(
        host="localhost",
        user="garage",
        passwd="*******",
        database="mydb"
    )
    cursor = mariadb_connection.cursor()
    result = []
    try:
        cursor.execute("SELECT * FROM heating WHERE id = '1'")
        for reading in cursor.fetchall():
            result.append(reading)
    except () as e:
        print (e)
    finally:
        # closing database connection.
        if mariadb_connection.is_connected():
            mariadb_connection.close()
            print("connection is closed")
    return (result)

while 1:
    test = readDbHeating()
    print test
    time.sleep(1)
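As an alternative to reconnecting on every call, the commit/autocommit approach from the earlier answer also works with a single long-lived connection. A minimal sketch, assuming the same heating table and credentials as above, committing before each read so the next SELECT sees a fresh snapshot:

import time
import mysql.connector as mariadb

mariadb_connection = mariadb.connect(
    host="localhost", user="garage", passwd="*******", database="mydb"
)
cursor = mariadb_connection.cursor()

def readDbHeating():
    # commit ends the previous transaction, so the following SELECT
    # sees rows committed by other sessions in the meantime
    mariadb_connection.commit()
    cursor.execute("SELECT * FROM heating WHERE id = '1'")
    return cursor.fetchall()

while 1:
    print(readDbHeating())
    time.sleep(1)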

Related

psycopg2.errors.InFailedSqlTransaction: current transaction is aborted, commands ignored until end of transaction block; don't know how to fix it

I am trying to insert data into my database using psycopg2 and I get this weird error. I tried some things but nothing works. This is my code:
def insert_transaction():
    global username
    now = datetime.now()
    date_checkout = datetime.today().strftime('%d-%m-%Y')
    time_checkout = now.strftime("%H:%M:%S")
    username = "Peter1"
    connection_string = "host='localhost' dbname='Los Pollos Hermanos' user='postgres' password='******'"
    conn = psycopg2.connect(connection_string)
    cursor = conn.cursor()

    try:
        query_check_1 = """(SELECT employeeid FROM employee WHERE username = %s);"""
        cursor.execute(query_check_1, (username,))
        employeeid = cursor.fetchone()[0]
        conn.commit()
    except:
        print("Employee error")

    try:
        query_check_2 = """SELECT MAX(transactionnumber) FROM Transaction"""
        cursor.execute(query_check_2)
        transactionnumber = cursor.fetchone()[0] + 1
        conn.commit()
    except:
        transactionnumber = 1

    """---------INSERT INTO TRANSACTION------------"""
    query_insert_transaction = """INSERT INTO transactie (transactionnumber, date, time, employeeemployeeid)
                                  VALUES (%s, %s, %s, %s);"""
    data = (transactionnumber, date_checkout, time_checkout, employeeid)
    cursor.execute(query_insert_transaction, data)
    conn.commit()
    conn.close()
This is the error:
", line 140, in insert_transaction
    cursor.execute(query_insert_transaction, data)
psycopg2.errors.InFailedSqlTransaction: current transaction is aborted, commands ignored until end of transaction block
The error message means that one of the preceding SQL statements has resulted in an error. If an exception occurs while executing an SQL statement you need to call the connection's rollback method (conn.rollback()) to reset the transaction's state. PostgreSQL will not permit further statement execution otherwise.
Ideally you want to record the actual error for later analysis, so your code ought to be structured like this:
try:
    cursor.execute(sql, values)
    conn.commit()
except Exception as e:
    print(f'Error {e}')
    print('Anything else that you feel is useful')
    conn.rollback()
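Applied to the first lookup in the question, that structure would look roughly like this (a sketch only, reusing the question's employee table, cursor and conn names):

try:
    cursor.execute("SELECT employeeid FROM employee WHERE username = %s;", (username,))
    employeeid = cursor.fetchone()[0]
except Exception as e:
    print(f"Employee lookup failed: {e}")
    conn.rollback()  # reset the aborted transaction so later statements can run
    employeeid = None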

Python: No of rows are always 9 and does not return affected rows count after UPDATE query

This is not something complicated, but I am not sure why it is not working.
import mysql.connector

def get_connection(host, user, password, db_name):
    connection = None
    try:
        connection = mysql.connector.connect(
            host=host,
            user=user,
            use_unicode=True,
            password=password,
            database=db_name
        )
        connection.set_charset_collation('utf8')
        print('Connected')
    except Exception as ex:
        print(str(ex))
    finally:
        return connection

with connection.cursor() as cursor:
    sql = 'UPDATE {} set underlying_price=9'.format(table_name)
    cursor.execute(sql)
    connection.commit()
    print('No of Rows Updated ...', cursor.rowcount)
It always returns 0 no matter what. The same query shows the correct count in TablePlus.
The MySQL API provides a method for this, but I do not know how to call it; calling it against the connection variable gives an error.
I am not sure why your code does not work, but I am using pymysql and it works:
import os
import pandas as pd
from types import SimpleNamespace
from sqlalchemy import create_engine
import pymysql

PARAM = SimpleNamespace()
PARAM.DB_user = 'yourname'
PARAM.DB_password = 'yourpassword'
PARAM.DB_name = 'world'
PARAM.DB_ip = 'localhost'

def get_DB_engine_con(PARAM):
    DB_name = PARAM.DB_name
    DB_ip = PARAM.DB_ip
    DB_user = PARAM.DB_user
    DB_password = PARAM.DB_password
    ## engine = create_engine("mysql+pymysql://{user}:{pw}@{ip}/{db}".format(user=DB_user, pw=DB_password, db=DB_name, ip=DB_ip))
    conn = pymysql.connect(host=DB_ip, user=DB_user, passwd=DB_password, db=DB_name)
    cur = conn.cursor()
    return cur, conn  ## , engine

cur, conn = get_DB_engine_con(PARAM)
With my data, if I run the code:
table_name='ct2'
sql = "UPDATE {} set CountryCode='NL' ".format(table_name)
cur.execute(sql)
conn.commit()
print('No of Rows Updated ...', cur.rowcount)
the result No of Rows Updated ... 10 is printed, and NLD is changed to NL.
If using mysql.connector
import mysql.connector

connection = mysql.connector.connect(
    host=PARAM.DB_ip,
    user=PARAM.DB_user,
    use_unicode=True,
    password=PARAM.DB_password,
    database=PARAM.DB_name
)
cur = connection.cursor()
table_name = 'ct2'
sql = "UPDATE {} set CountryCode='NL2' ".format(table_name)
cur.execute(sql)
print('No of Rows Updated ...', cur.rowcount)
connection.commit()
it still works: the country code is updated to NL2 and No of Rows Updated ... 10 is printed. The second time I run it, No of Rows Updated ... 0 is printed.
Not sure why it does not work on your machine.
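One likely explanation, offered as an assumption rather than something verified on the asker's machine: MySQL's rowcount reports rows actually changed, not rows matched, so re-running an UPDATE that sets values already in place reports 0. mysql-connector-python can be asked to count matched rows instead via the FOUND_ROWS client flag, roughly like this (connection details are placeholders):

import mysql.connector
from mysql.connector.constants import ClientFlag

# FOUND_ROWS makes the server report matched rows rather than changed rows
connection = mysql.connector.connect(
    host="localhost",
    user="yourname",
    password="yourpassword",
    database="world",
    client_flags=[ClientFlag.FOUND_ROWS],
)
cur = connection.cursor()
cur.execute("UPDATE ct2 SET CountryCode = 'NL2'")
connection.commit()
print('No of Rows Updated ...', cur.rowcount)  # now counts matched rows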

How to keep continuously writing to an Oracle table even if the table is not accessible?

I am trying to insert multiple records into one Oracle table continuously, for which I have written the Python script below.
import cx_Oracle
import config

connection = None
try:
    # Make a connection
    connection = cx_Oracle.connect(
        config.username,
        config.password,
        config.dsn,
        encoding=config.encoding)

    # show the version of the Oracle Database
    print(connection.version)

    # Insert 20000 records
    for i in range(1, 20001):
        cursor = connection.cursor()
        sql = "INSERT into SCHEMA.ABC (EVENT_ID, EVENT_TIME) VALUES( " + str(i) + " , CURRENT_TIMESTAMP)"
        cursor.execute(sql)
        connection.commit()
except cx_Oracle.Error as error:
    print(error)
finally:
    if connection:
        connection.close()
So, during the insert, when I rename the table the script just raises an exception and exits (as the table is not available and cannot be written to). What I want is: even while I do the rename and the table is not available, the script should keep retrying the insert. Is there a way this can be done?
Here's an example of what Ptit Xav was talking about. I added some code to quit after a max number of retries, since that's often desirable.
# Insert 20000 records
for i in range(1, 20001):
    retry_count = 0
    data_inserted = False
    while not data_inserted:
        try:
            cursor = connection.cursor()
            sql = "INSERT into SCHEMA.ABC (EVENT_ID, EVENT_TIME) VALUES( " + str(i) + " , CURRENT_TIMESTAMP)"
            cursor.execute(sql)
            connection.commit()
            data_inserted = True
        except cx_Oracle.Error as error:
            print(error)
            time.sleep(5)  # wait for 5 seconds between retries
            retry_count += 1
            if retry_count > 100:
                print(f"Retry count exceeded on record {i}, quitting")
                break
    else:
        # continue to next record if the data was inserted
        continue
    # retry count was exceeded; break the for loop.
    break
See this answer for more explanation of the while... else logic.
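In brief, the else branch of a while loop runs only when the loop exits without hitting a break; a tiny illustrative sketch:

attempts = 0
while attempts < 3:
    attempts += 1
else:
    # reached only because the while condition became false (no break)
    print("loop ended normally, move on to the next record")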
You may want to encapsulate the insert logic in a function that catches the possible exception and performs the retry:
def safe_insert(con, i):
    """
    insert a row with retry after exception
    """
    retry_cnt = 0
    sql_text = "insert into ABC(EVENT_ID, EVENT_TIME) VALUES(:EVENT_ID, CURRENT_TIMESTAMP) "
    while True:
        try:
            with con.cursor() as cur:
                cur.execute(sql_text, [i])
            con.commit()
            return
        except cx_Oracle.Error as error:
            print(f'error on inserting row {i}')
            print(error)
            time.sleep(1)
            retry_cnt += 1
            if retry_cnt > 10:
                raise error
Similar to @kfinity's answer, I also added a limit on retries; if this limit is exceeded the function raises the exception.
Note also that the function uses bind variables in the INSERT statement, which is preferable to concatenating the values into the statement.
The usage is as simple as:
for i in range(1, 20001):
    safe_insert(con, i)

Updating results from a mysql-connector fetchall

I'm trying to select certain records from the civicrm_address table and update the geocode columns. I use fetchall to retrieve the rows; then, within the same loop, I try to update with the results of the geocoder API, passing the civicrm_address.id value in the update_sql statement.
The rowcount after the attempted update and commit is always -1, so I assume it failed for some reason, but I have yet to figure out why.
import geocoder
import mysql.connector

mydb = mysql.connector.connect(
    [redacted]
)
mycursor = mydb.cursor(dictionary=True)
update_cursor = mydb.cursor()

sql = """
    select
        a.id
        , street_address
        , city
        , abbreviation
    from
        civicrm_address a
        , civicrm_state_province b
    where
        location_type_id = 6
    and
        a.state_province_id = b.id
    and
        street_address is not null
    and
        city is not null
    limit 5
    """

mycursor.execute(sql)
rows = mycursor.fetchall()
print(mycursor.rowcount, "records selected")

for row in rows:
    address_id = int(row["id"])
    street_address = str(row["street_address"])
    city = str(row["city"])
    state = str(row["abbreviation"])
    myaddress = street_address + " " + city + ", " + state
    g = geocoder.arcgis(myaddress)
    d = g.json
    latitude = d["lat"]
    longitude = d["lng"]

    update_sql = """
        begin work;
        update
            civicrm_address
        set
            geo_code_1 = %s
            , geo_code_2 = %s
        where
            id = %s
        """
    var = (latitude, longitude, address_id)
    print(var)
    update_cursor.execute(update_sql, var, multi=True)
    mydb.commit()
    print(update_cursor.rowcount)

mycursor.close()
update_cursor.close()
mydb.close()
Here is a simpler script. I have executed the update_sql statement directly in MySQL Workbench and it succeeds; it is not working from Python.
import geocoder
import mysql.connector

try:
    mydb = mysql.connector.connect(
        [redacted]
    )
    mycursor = mydb.cursor(dictionary=True)
    update_cursor = mydb.cursor()
    update_sql = """
        begin work;
        update
            civicrm_address
        set
            geo_code_1 = 37.3445
            , geo_code_2 = -118.5366074
        where
            id = 65450;
        """
    update_cursor.execute(update_sql, multi=True)
    mydb.commit()
    print(update_cursor.rowcount, "row(s) were updated")
except mysql.connector.Error as error:
    print("Failed to update record to database: {}".format(error))
    mydb.rollback()
finally:
    # closing database connection.
    if mydb.is_connected():
        mydb.close()
I have it working now. I removed the "begin work" statement but kept multi=True, and it still wouldn't work. Later I also removed multi=True and it works. A sketch of that working form is shown below.
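For reference, a minimal sketch of the working form under those assumptions (the question's table and columns, a plain parameterised UPDATE, no begin work and no multi=True):

update_sql = """
    update civicrm_address
    set geo_code_1 = %s,
        geo_code_2 = %s
    where id = %s
    """
var = (latitude, longitude, address_id)
update_cursor.execute(update_sql, var)  # single statement, no multi=True
mydb.commit()
print(update_cursor.rowcount, "row(s) were updated")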

Python Not inserting into MySQL

import MySQLdb
import time

try:
    db = MySQLdb.connect(host="",    # your host, usually localhost
                         user="",    # your username
                         passwd="",  # your password
                         db="")      # name of the data base
    cur = db.cursor()
except mysql.connector.Error as err:
    print("Something went wrong: {}".format(err))

SQL = "INSERT INTO TBL_PYTest (Time) VALUES (%s)"
Count = 0
while Count < 5:
    UTime = int(time.time())
    print UTime
    cur.execute(SQL, (UTime))
    time.sleep(5)
    Count = Count + 1
    print Count
Why isn't this working? It prints correctly but the database stays empty.
I've checked the DB and it seems fine.
All the details are correct.
You would need to commit your transaction, or set autocommit to True. For example:
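A minimal sketch of both options with MySQLdb, reusing the names from the question's loop:

# Option 1: commit after each insert
while Count < 5:
    UTime = int(time.time())
    cur.execute(SQL, (UTime,))  # note the trailing comma: parameters must be a sequence
    db.commit()                 # make the insert visible outside this transaction
    time.sleep(5)
    Count = Count + 1

# Option 2: turn autocommit on once, right after connecting
db.autocommit(True)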
