import MySQLdb
import time
try:
db = MySQLdb.connect(host="", #your host, usually localhost
user="", #your username
passwd="", #your password
db="") #name of the data base
cur = db.cursor()
except mysql.connector.Error as err:
print("Something went wrong: {}".format(err))
SQL = "INSERT INTO TBL_PYTest (Time) VALUES (%s)"
Count = 0
while Count < 5:
UTime = int(time.time())
print UTime
cur.execute(SQL, (UTime))
time.sleep(5)
Count = Count + 1
print Count
Why isn't this working? its printing correctly but the database stays empty.
Ive checked the DB and it seems fine
All the details are correct
You would need to commit your transaction , or set autocommit as True.
Related
I am trying to insert data into my database using psycopg2 and I get this weird error. I tried some things but nothing works. This is my code:
def insert_transaction():
global username
now = datetime.now()
date_checkout = datetime.today().strftime('%d-%m-%Y')
time_checkout = now.strftime("%H:%M:%S")
username = "Peter1"
connection_string = "host='localhost' dbname='Los Pollos Hermanos' user='postgres' password='******'"
conn = psycopg2.connect(connection_string)
cursor = conn.cursor()
try:
query_check_1 = """(SELECT employeeid FROM employee WHERE username = %s);"""
cursor.execute(query_check_1, (username,))
employeeid = cursor.fetchone()[0]
conn.commit()
except:
print("Employee error")
try:
query_check_2 = """SELECT MAX(transactionnumber) FROM Transaction"""
cursor.execute(query_check_2)
transactionnumber = cursor.fetchone()[0] + 1
conn.commit()
except:
transactionnumber = 1
""""---------INSERT INTO TRANSACTION------------"""
query_insert_transaction = """INSERT INTO transactie (transactionnumber, date, time, employeeemployeeid)
VALUES (%s, %s, %s, %s);"""
data = (transactionnumber, date_checkout, time_checkout, employeeid)
cursor.execute(query_insert_transaction, data)
conn.commit()
conn.close()
this is the error:
", line 140, in insert_transaction
cursor.execute(query_insert_transaction, data) psycopg2.errors.InFailedSqlTransaction: current transaction is aborted, commands ignored until end of transaction block
The error message means that one of the preceding SQL statements has resulted in an error. If an exception occurs while executing an SQL statement you need to call the connection's rollback method (conn.rollback()) to reset the transaction's state. PostgreSQL will not permit further statement execution otherwise.
Ideally you want to record the actual error for later analysis, so your code ought to be structured like this:
try:
cursor.execute(sql, values)
conn.commit()
except Exception as e:
print(f'Error {e}')
print('Anything else that you feel is useful')
conn.rollback()
This is not something complicated but not sure why is it not working
import mysql.connector
def get_connection(host, user, password, db_name):
connection = None
try:
connection = mysql.connector.connect(
host=host,
user=user,
use_unicode=True,
password=password,
database=db_name
)
connection.set_charset_collation('utf8')
print('Connected')
except Exception as ex:
print(str(ex))
finally:
return connection
with connection.cursor() as cursor:
sql = 'UPDATE {} set underlying_price=9'.format(table_name)
cursor.execute(sql)
connection.commit()
print('No of Rows Updated ...', cursor.rowcount)
It always returns 0 no matter what. The same query shows correct count on TablePlus
MysQL API provides this method but I do not know how to call it as calling against connection variable gives error
I am not sure why your code does not work. But i am using pymysql, and it works
import os
import pandas as pd
from types import SimpleNamespace
from sqlalchemy import create_engine
import pymysql
PARAM = SimpleNamespace()
PARAM.DB_user='yourname'
PARAM.DB_password='yourpassword'
PARAM.DB_name ='world'
PARAM.DB_ip = 'localhost'
def get_DB_engine_con(PARAM):
DB_name = PARAM.DB_name
DB_ip = PARAM.DB_ip
DB_user = PARAM.DB_user
DB_password = PARAM.DB_password
## engine = create_engine("mysql+pymysql://{user}:{pw}#{ip}/{db}".format(user=DB_user,pw=DB_password,db=DB_name,ip=DB_ip))
conn = pymysql.connect(host=DB_ip, user=DB_user,passwd=DB_password,db=DB_name)
cur = conn.cursor()
return cur, conn ## , engine
cur, conn = get_DB_engine_con(PARAM)
and my data
if i run the code
table_name='ct2'
sql = "UPDATE {} set CountryCode='NL' ".format(table_name)
cur.execute(sql)
conn.commit()
print('No of Rows Updated ...', cur.rowcount)
the result No of Rows Updated ... 10 is printed. and the NLD is changed to NL
If using mysql.connector
import mysql.connector
connection = mysql.connector.connect(
host=PARAM.DB_ip,
user=PARAM.DB_user,
use_unicode=True,
password=PARAM.DB_password,
database=PARAM.DB_name
)
cur = connection.cursor()
table_name='ct2'
sql = "UPDATE {} set CountryCode='NL2' ".format(table_name)
cur.execute(sql)
print('No of Rows Updated ...', cur.rowcount)
connection.commit()
it still works
and the country code is updated to NL2 and No of Rows Updated ... 10 is printed. The second time i run then No of Rows Updated ... 0 is printed.
Not sure why it does not work on your machine.
I am trying to insert values into a table within my redshift cluster, it is connected as I can read the table but I can't insert values on it. When I use SELECT statements it works fine but when I try to insert values from lambda function, it is aborted with no error or log info about why was it aborted.
The query part is like this:
conn = psycopg2.connect(dbname = 'dev',
host =
'redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
port = '5439',
user = '****',
password = '****%')
cur = conn.cursor()
cur.execute("INSERT INTO public.summaries(topic,summary)
values('data', 'data_summary');")
#print(cur.fetchone())
cur.close()
conn.close()
As I said, there is no log information about why was it aborted, neither it is giving me any kind of error. Actually, when I just use a Select statement, it works.
Is there anyone who can guide me through what could be going on?
You forgot to do conn.commit()
conn = psycopg2.connect(dbname = 'dev',
host = 'redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
port = '5439',
user = '****',
password = '****%')
cur = conn.cursor()
cur.execute("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")
cur.close()
conn.commit()
conn.close()
a bit improved way to run this
from contextlib import contextmanager
#contextmanager
def cursor():
with psycopg2.connect(dbname = 'dev',
host = 'redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
port = '5439',
user = '****',
password = '****%') as conn:
try:
yield conn.cursor()
finally:
try:
conn.commit()
except psycopg2.InterfaceError:
pass
def run_insert(query):
with cursor() as cur:
cur.execute(query)
cur.close()
run_insert("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")
I am pulling data from a database in a python script which works on start up but does not update when I change the value within the database.
The script initially connects to the database then selects all the contents where id is equal to 1 (this has a number of columns which I intend to use)
It then updates the global variable garageHeating with the contents from the garage column, lets say this is equal to 0 at start up.
The while loop them prints out this variable which on start will be 0
If I them change the column garage within the database to 10 the python script stays at 0. It does not update the global variable which I thought it should do
my script is below
import mysql.connector as mariadb
mariadb_connection = mariadb.connect(
host="localhost",
user="garage",
passwd="*******",
database="mydb"
)
cursor = mariadb_connection.cursor()
garageHeating = 0
def readDbHeating():
global garageHeating
result = []
try:
cursor.execute ("SELECT * FROM heating WHERE id = '1'")
for reading in cursor.fetchall():
result.append (reading)
garageHeating = result[0][8]
except () as e:
print (e)
while 1:
readDbHeating()
print garageHeating
I have edited the code above shown below to show the global variable outside the function and also to print out the result within the try command. I am still getting the same issue after the first pull of data from the database the script keeps displaying the first set data but does not update to the new data if i change the database records.
import mysql.connector as mariadb
mariadb_connection = mariadb.connect(
host="localhost",
user="garage",
passwd="14Odiham",
database="mydb"
)
cursor = mariadb_connection.cursor()
global garageHeating
def readDbHeating():
result = []
try:
cursor.execute ("SELECT * FROM heating WHERE id = '1'")
for reading in cursor.fetchall():
result.append (reading)
garageHeating = result[0][8]
print garageHeating
print result
except () as e:
print (e)
while 1:
readDbHeating()
#print garageHeating
You must either call MySQLdb.connections.Connection.commit before executing a query or set autocommit on the connection object.
Commiting before query
def readDbHeating():
global garageHeating
result = []
try:
# commit
mariadb_connection.commit()
cursor.execute ("SELECT * FROM heating WHERE id = '1'")
for reading in cursor.fetchall():
result.append (reading)
garageHeating = result[0][8]
except () as e:
print (e)
Autocommit when creating a connection
mariadb_connection = mariadb.connect(
host="localhost",
user="garage",
passwd="14Odiham",
database="mydb",
# Auto commit
autocommit=True
)
Autocommit after connection creation
mariadb_connection.autocommit(True)
I seem to have it working. I have to open the database connection and then close the connection within the function as shown below. This is different to what I have done in the past with mysql but at the moment at least it now works
import mysql.connector as mariadb
import time
garageHeating = 0
def readDbHeating():
mariadb_connection = mariadb.connect(
host="localhost",
user="garage",
passwd="14Odiham",
database="mydb"
)
cursor = mariadb_connection.cursor()
result = []
try:
cursor.execute ("SELECT * FROM heating WHERE id = '1'")
for reading in cursor.fetchall():
result.append (reading)
except () as e:
print (e)
finally:
#closing database connection.
if(mariadb_connection.is_connected()):
mariadb_connection.close()
print("connection is closed")
return (result)
while 1:
test = readDbHeating()
print test
time.sleep(1)
This is a very straight forward question regarding how to insert or select data from/to a database ? Since i'm trying to keep my code as clean as possible, this is how i'm actually performing queries and inserts/updates:
import sys
import MySQLdb
from ConfigParser import SafeConfigParser
#------------------------------------------------------------
# Select and insert
# this func should be called like:
# db_call('c:\dbconf.cfg','select * from something')
# or insert / update statement.
#------------------------------------------------------------
def db_call(cfgFile, sql):
parser = SafeConfigParser()
parser.read(cfgFile)
dbType = parser.get('database', 'db_type')
db_host = parser.get('database', 'db_host')
db_name = parser.get('database', 'db_name')
db_user = parser.get('database', 'db_login')
db_pass = parser.get('database', 'db_pass')
con = MySQLdb.connect(host=db_host, db=db_name,
user=db_user, passwd=db_pass
)
cur = con.cursor()
try:
try:
cur.execute(sql)
if re.match(r'INSERT|insert|UPDATE|update|DELETE|delete', sql):
con.commit()
else:
data = cur.fetchall()
resultList = []
for data_out in data:
resultList.append(data_out)
return resultList
except MySQLdb.Error, e:
con.rollback()
print "Error "
print e.args
sys.exit(1)
else:
con.commit()
finally:
con.close()
But, using this method i have to keep all the queries inside my main class, where that can be a problem if any change happens into the table structure,
But, going for sp call, i can have the code more clean, passing only the sp name and fields. But sometimes this could lead me to have one python function for more specific cases, ( as an example, sp that receives 2,3 or 4 inputs must have diferent python functions for each )
import sys
import MySQLdb
from ConfigParser import SafeConfigParser
#------------------------------------------------------------
# Select only!!!!!!
# this func should be called like:
# db_call('fn_your_function','field_a','field_b')
# or insert / update statement.
#------------------------------------------------------------
def db_call(self, cfgFile, query):
parser = SafeConfigParser()
parser.read(cfgFile)
dbType = parser.get('database', 'db_type')
db_host = parser.get('database', 'db_host')
db_name = parser.get('database', 'db_name')
db_user = parser.get('database', 'db_login')
db_pass = parser.get('database', 'db_pass')
con = MySQLdb.connect(host=db_host, db=db_name,
user=db_user, passwd=db_pass
)
cur = con.cursor()
try:
cur.callproc(query[0], (query[1],query[2]))
data = cur.fetchall()
resultList = []
for data_out in data:
resultList.append(data_out)
return resultList
con.close()
except MySQLdb.Error, e:
con.rollback()
print "Error "
print e.args
sys.exit(1)
Im not sure if here is the right place to ask this, but before voting to close it (if is the case ) please reply with the information where i could ask this kind of question :)
Thanks in advance.
If your goal is to abstract away the schema of your DB from your objects' implementations, you should probably be looking at ORMs/persistence frameworks. There are a number of them in Python. As examples, SQLAlchemy is popular and Django, a popular web framework, has one built in.