I want to insert some records into a MySQL database from PostgreSQL (both are remote servers), so I'm using the script below, but the data is not getting inserted into the MySQL database.
There is a final count statement which always returns zero. What could be the reason?
Also, is there any improvement I should make to this code?
Suggestions please.
import psycopg2
import os
import time
#import MySQLdb
from mysql.connector import (connection)
import sys
#from pprint import pprint
#from datetime import datetime
#from psycopg2 import sql
#from utils.config import Configuration as Config
#from utils.postgres_helper import get_connection
#from utils.utils import get_global_config
def db_connect():
    # MySQLdb connection
    try:
        source_host = 'magento'
        #conf = get_global_config()
        #cnx_msql = MySQLdb.connect(host=conf.get(source_host, 'host'),
        #                           user=conf.get(source_host, 'user'),
        #                           passwd=conf.get(source_host, 'password'),
        #                           port=int(conf.get(source_host, 'port')),
        #                           db=conf.get(source_host, 'db'))
        cnx_msql = connection.MySQLConnection(host='server.com', user='db13009', passwd='fgDT***********', port=3306, db='db13009')
        print('MySQL DB connected')
    except mysql.connector.Error as e:
        print("MYSQL: Unable to connect!", e.msg)
        sys.exit(1)

    # Postgresql connection
    try:
        #cnx_psql = get_connection(get_global_config(), 'pg_dwh')
        cnx_psql = psycopg2.connect(host='xxx.xxx.xx.xx',
                                    dbname='postgres',
                                    port='5432',
                                    user='postgres',
                                    password='*********')
        print('PSQL DB connected')
    except psycopg2.Error as e:
        print('PSQL: Unable to connect!\n{0}').format(e)
        sys.exit(1)

    # Cursors initializations
    cur_msql = cnx_msql.cursor()
    cur_psql = cnx_psql.cursor()

    SQL_test = """SELECT count(*) from action_status;"""
    cur_msql.execute(SQL_test)
    records = cur_msql.fetchall()
    for row in records:
        print("count = ", row[0], )

    msql_command = ""
    try:
        SQL_load = """SELECT created_at,payload from staging.sync;"""
        msql_ins = """INSERT INTO action_status(payload,created_at) VALUES (%s, %s) """
        cur_psql.execute(SQL_load)
        for row in cur_psql:
            try:
                print(row[0])
                print(row[1])
                cur_msql.execute(msql_ins, row[0], row[1])
            except psycopg2.Error as e:
                print('ffffffffffffff')
                print("Cannot execute the query!!", e.pgerror)
                sys.exit(1)
        cnx_msql.commit()
        cur_msql.execute(SQL_test)
        records = cur_msql.fetchall()
        for row in records:
            print("count = ", row[0], )
    except (Exception, psycopg2.Error) as error:
        print("Error while fetching data from PostgreSQL", error)
    finally:
        ## Closing cursors
        cur_msql.close()
        cur_psql.close()
        ## Committing
        cnx_psql.commit()
        ## Closing database connections
        cnx_msql.close()
        cnx_psql.close()

if __name__ == '__main__':
    db_connect()
msql_ins is missing a semicolon (not sure whether that is required or not). More importantly, you are missing a tuple; instead of:
cur_msql.execute(msql_ins, row[0],row[1])
try this instead:
cur_msql.execute(msql_ins, (row[0], row[1]))
Hope that helps :)
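As a follow-up thought, here is a minimal sketch (not part of the answer above) of how the row-by-row insert could be collapsed into a single executemany() call once the parameters are passed as tuples. It reuses the cursor and connection names from the question, and note that the SELECT returns (created_at, payload), so the tuple is reordered to match the column order of the INSERT:
def copy_sync_rows(cur_psql, cur_msql, cnx_msql):
    """Sketch: copy staging.sync rows into action_status with one executemany() call."""
    msql_ins = "INSERT INTO action_status (payload, created_at) VALUES (%s, %s)"
    cur_psql.execute("SELECT created_at, payload FROM staging.sync;")
    # The SELECT yields (created_at, payload); swap to match the INSERT column order.
    rows = [(payload, created_at) for created_at, payload in cur_psql]
    if rows:
        cur_msql.executemany(msql_ins, rows)  # every parameter set is a tuple
        cnx_msql.commit()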
I want to write some basic code to run read-only queries on SQLite databases.
These are daily db files, so it is important that after closing the connections no other files, such as the associated db-shm or db-wal files, are left behind on the server.
I have been reading the documentation, and it seems that even though I try to close the connections explicitly, they are not closed, so these files stay there.
import sqlite3
import pandas as pd
def _connect(f):
    con = None
    try:
        con = sqlite3.connect("file:"+f+"?mode=ro", uri=True)
    except sqlite3.Error as er:
        print('SQLite error in the db connection: %s' % (' '.join(er.args)))
    return con

def _disconnect(con):
    try:
        con.close()
    except sqlite3.Error as er:
        print('SQLite error in the db disconnection: %s' % (' '.join(er.args)))
    return con

def fl_query(file):
    '''Returns information about a db file

    file : string
        absolute path to the db file

    returns
    -------
    list
    '''
    cn = _connect(file)
    cur = cn.cursor()
    query = """SELECT ....etc"""
    cur.execute(query)
    info = [(row[0],row[1],row[2],row[3],row[4],row[5]) for row in cur.fetchall()]
    cur.close()
    _disconnect(cn)
    return info

folder = 'path to the db file'
file = folder+'6100000_2022-09-18.db'
info = fl_query(file)
I have read about how to close databases cleanly, but so far nothing works and the db-shm and db-wal files stay there every time I open a file. Remark: it is a server with thousands of files, so it is important not to create more files.
I realized that I was unable to modify the PRAGMA journal_mode because I was opening the database in read-only mode. In read-write mode the change of journal_mode to DELETE goes through:
import sqlite3
import pandas as pd
def _connect(f):
    con = None
    try:
        con = sqlite3.connect("file:"+f+"?mode=rw", uri=True)
    except sqlite3.Error as er:
        print('SQLite error in the db connection: %s' % (' '.join(er.args)))
    return con

def _disconnect(con):
    try:
        con.close()
    except sqlite3.Error as er:
        print('SQLite error in the db disconnection: %s' % (' '.join(er.args)))
    return con

def fl_query(file):
    '''Returns information about a db file

    file : string
        absolute path to the db file

    returns
    -------
    list
    '''
    cn = _connect(file)
    cur = cn.cursor()
    cur.execute("PRAGMA journal_mode=DELETE")
    query = """SELECT ....etc"""
    cur.execute(query)
    info = [(row[0],row[1],row[2],row[3],row[4],row[5]) for row in cur.fetchall()]
    cur.close()
    _disconnect(cn)
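For what it is worth, here is a minimal standalone sketch of the same point (the db path below is hypothetical): the journal mode can only be changed on a read-write connection, and sqlite3 returns the resulting mode as a row, so you can check whether the switch actually happened:
import sqlite3

con = sqlite3.connect("file:/tmp/daily.db?mode=rw", uri=True)  # read-write; in read-only mode the journal mode cannot be changed
cur = con.cursor()
cur.execute("PRAGMA journal_mode=DELETE")
print(cur.fetchone())  # ('delete',) once the switch succeeded; the -wal/-shm files are then cleaned up
cur.close()
con.close()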
I am trying to store some data in a MySQL database (via MySQL Workbench) using a Python script, but I get the following error when I try to insert (anomaly_low, anomaly_medium, anomaly_high) into the database:
Failed processing format-parameters; Python 'float64' cannot be converted to a MySQL type.
Actually, I am extracting some data from my DB and giving it as arguments to a function, then using the value returned by the function in another query. My code is:
import numpy as np
import matplotlib
import skfuzzy as fuzz
from skfuzzy import control as ctrl
from matplotlib import pyplot as plt
import mysql.connector
from mysql.connector import Error
from mysql.connector import errorcode
import time
import datetime
try:
    connection = mysql.connector.connect(host='1111',
                                         database='x',
                                         user='x',
                                         password='xx')
    cursor = connection.cursor(buffered=True)

    anomaly = (a.compute_anomaly(85, 5))
    print(anomaly)
    test = str(a.get_anomaly(anomaly))[1:-1]
    print(test)
    t = test.split(',')
    anomaly_low = t[0]
    anomaly_medium = t[1]
    anomaly_high = t[2]
    print(anomaly_low)
    print(anomaly_medium)
    print(anomaly_high)

    ts = time.time()
    timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')

    # insert data to fuzzy_anomaly
    sql_anomaly = "INSERT INTO fuzzy_anomaly(anomaly, anomaly_low, anomaly_medium, anomaly_high, timestamp) VALUES (%s,%s,%s,%s,%s)"
    input = (anomaly, anomaly_low, anomaly_medium, anomaly_high, timestamp)
    cursor.execute(sql_anomaly, input, multi=True)
    connection.commit()
    print("1 record inserted, ID:", cursor.lastrowid)
except mysql.connector.Error as error:
    print("Failed to update record to database: {}".format(error))
    connection.rollback()
finally:
    # closing database connection.
    if connection.is_connected():
        connection.close()
        print("MySQL connection is closed")
These are my field/column names in my database. I am trying to insert the anomaly, anomaly_low, anomaly_medium, anomaly_high and timestamp data into MySQL.
But when I manually pass arguments to the compute_anomaly(arg1, arg2) function and delete the first query (input_fuzzy), the error goes away.
Any idea? Thanks in advance.
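For reference, this error usually comes from passing numpy.float64 values straight to mysql.connector, which cannot convert them. A minimal sketch of the common workaround, casting to plain Python float before executing (the anomaly values below are made up; the table and column names are taken from the question):
import datetime
import numpy as np
import mysql.connector

# Hypothetical values standing in for compute_anomaly()'s output.
anomaly = np.float64(7.2)
anomaly_low, anomaly_medium, anomaly_high = np.float64(0.1), np.float64(0.6), np.float64(0.3)
timestamp = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

connection = mysql.connector.connect(host='localhost', database='x', user='x', password='xx')
cursor = connection.cursor()
sql_anomaly = ("INSERT INTO fuzzy_anomaly"
               "(anomaly, anomaly_low, anomaly_medium, anomaly_high, timestamp)"
               " VALUES (%s, %s, %s, %s, %s)")
# numpy.float64 is rejected by the connector, so cast each value to float first.
params = tuple(float(v) for v in (anomaly, anomaly_low, anomaly_medium, anomaly_high)) + (timestamp,)
cursor.execute(sql_anomaly, params)  # no multi=True needed for a single statement
connection.commit()
cursor.close()
connection.close()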
I am trying to export MySQL data into a .txt file using Python 3.x, but it looks like I'm missing something. The expectation is that the data should be written to the file in a tabular/column format. I have tried my best to find a solution but I'm not getting what I need.
Below is my code:
import pymysql.cursors
import pymysql
import sys
import os
# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password="",
                             db='jmeterdb',
                             cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        # Select all records
        sql = "select * from emp"
        cursor.execute(sql)
        # connection is not autocommit by default. So you must commit to save
        # your changes.
        result = cursor.fetchall()
        newfile = open("db-data.txt","a+")
        for row in result:
            newfile.writelines(row)
        print(result)
        newfile.close()
finally:
    connection.close()
On the terminal, Python shows me the data when print(result) is executed, but the db-data.txt file contains the column names only.
Expected result :
Column_Name1 Column_Name2 Column_Name3
data1 data2 data3
data1 data2 data3
The code below produces the expected output for the above question:
import pymysql.cursors
import pymysql
import sys
import os
# Open database connection
connection = pymysql.connect(host='localhost',
                             user='root',
                             password="",
                             db='jmeterdb',
                             cursorclass=pymysql.cursors.DictCursor)

# prepare a cursor object using cursor() method
with connection.cursor() as cursor:
    # Prepare SQL query to select a record into the database.
    try:
        sql = "SELECT * FROM EMP order by ename asc"
        # Execute the SQL command
        cursor.execute(sql)
        # Fetch all the rows in a list of lists.
        results = cursor.fetchall()
        # print(results)
        if results:
            newfile = open("db-data.txt","a+")
            newfile.write('ename'+"\t"+'jobs'+"\t"+"salary"+"\t"+'comm'+"\t"+'manager'+"\t"+'hiredate'+"\t"+'deptno'+"\t"+'empno'+"\n")
            for index in results:
                ltr = []
                ltr.append(index['ename'])
                ltr.append(index['job'])
                ltr.append(index['sal'])
                ltr.append(index['comm'])
                ltr.append(index['mgr'])
                ltr.append(index['hiredate'])
                ltr.append(index['deptno'])
                ltr.append(index['empno'])
                lenltr = len(ltr)
                for i in range(lenltr):
                    newfile.write('{}'.format(ltr[i]))
                    newfile.write("\t")
                    print(ltr[i])
                newfile.write("\n")
            # Now print fetched result
            # print("ename=%s,empno=%d,job=%d,hiredate=%d,comm=%s,sal=%d,deptno=%d,mgr=%d" %(ename, empno, job, hiredate, comm, sal, deptno, mgr))
            # print(index)
    except:
        print("Error: unable to fetch data")

# disconnect from server
connection.close()
newfile.close()
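As a side note, here is a more compact sketch of the same idea using the csv module with a tab delimiter, so separators and newlines are handled for you (same jmeterdb/emp names as above; the column list is assumed from the answer):
import csv
import pymysql
import pymysql.cursors

connection = pymysql.connect(host='localhost', user='root', password="",
                             db='jmeterdb', cursorclass=pymysql.cursors.DictCursor)
columns = ['ename', 'job', 'sal', 'comm', 'mgr', 'hiredate', 'deptno', 'empno']

try:
    with connection.cursor() as cursor:
        cursor.execute("SELECT * FROM emp ORDER BY ename ASC")
        rows = cursor.fetchall()
    with open("db-data.txt", "w", newline="") as f:
        writer = csv.writer(f, delimiter="\t")
        writer.writerow(columns)                        # header line
        for row in rows:
            writer.writerow([row[c] for c in columns])  # one tab-separated line per record
finally:
    connection.close()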
The following prints the results properly, but no records show up in the MySQL table I'm attempting to populate:
#!/usr/bin/python
import MySQLdb
import string
db = MySQLdb.connect(host="localhost",
                     user="user",
                     passwd="******",
                     db="test")
cur = db.cursor()

i = 0
for lt in string.ascii_lowercase:
    dbinsert = """insert into dns(domain,A,MX,T,serial,ttl)
        values('"""+lt+""".com',' 1.1.1."""+str(i)+"""\\n','10 mx1.somehost.com.\\n',
        '# "txt data"\\n',2013092001,300)"""
    print dbinsert
    i += 1
    #cur=db.cursor()
    try:
        cur.execute(dbinsert)
        db.commit
    except MySQLdb.Error, e:
        print e[0], e[1]
        db.rollback()
db.close()
What am I missing here?
db.commit should be db.commit(); without the parentheses you are only referencing the method, not calling it, so nothing is ever committed.
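For illustration, here is a sketch of the same loop with the commit call fixed and the values passed as query parameters instead of being concatenated into the SQL string (same dns table and columns as the question; the connection details are the question's placeholders):
#!/usr/bin/python
import MySQLdb
import string

db = MySQLdb.connect(host="localhost", user="user", passwd="******", db="test")
cur = db.cursor()

dbinsert = """insert into dns(domain,A,MX,T,serial,ttl)
              values(%s, %s, %s, %s, %s, %s)"""
try:
    for i, lt in enumerate(string.ascii_lowercase):
        # Let the driver quote and escape each value instead of building SQL by hand.
        cur.execute(dbinsert, (lt + ".com", " 1.1.1." + str(i) + "\n",
                               "10 mx1.somehost.com.\n", '# "txt data"\n',
                               2013092001, 300))
    db.commit()  # note the parentheses: commit must be called
except MySQLdb.Error as e:
    print(e)
    db.rollback()
db.close()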
This is a fairly straightforward question about how to insert data into or select data from a database. Since I'm trying to keep my code as clean as possible, this is how I'm actually performing queries and inserts/updates:
import re  # needed for the re.match() check below
import sys
import MySQLdb
from ConfigParser import SafeConfigParser

#------------------------------------------------------------
# Select and insert
# this func should be called like:
#   db_call('c:\dbconf.cfg', 'select * from something')
# or an insert / update statement.
#------------------------------------------------------------
def db_call(cfgFile, sql):
    parser = SafeConfigParser()
    parser.read(cfgFile)

    dbType = parser.get('database', 'db_type')
    db_host = parser.get('database', 'db_host')
    db_name = parser.get('database', 'db_name')
    db_user = parser.get('database', 'db_login')
    db_pass = parser.get('database', 'db_pass')

    con = MySQLdb.connect(host=db_host, db=db_name,
                          user=db_user, passwd=db_pass)
    cur = con.cursor()
    try:
        try:
            cur.execute(sql)
            if re.match(r'INSERT|insert|UPDATE|update|DELETE|delete', sql):
                con.commit()
            else:
                data = cur.fetchall()
                resultList = []
                for data_out in data:
                    resultList.append(data_out)
                return resultList
        except MySQLdb.Error, e:
            con.rollback()
            print "Error "
            print e.args
            sys.exit(1)
        else:
            con.commit()
    finally:
        con.close()
But using this method I have to keep all the queries inside my main class, which can be a problem if the table structure ever changes.
Going for a stored-procedure call instead, I can keep the code cleaner, passing only the procedure name and its fields. But sometimes this leads me to one Python function per specific case (for example, procedures that receive 2, 3 or 4 inputs each need a different Python function):
import sys
import MySQLdb
from ConfigParser import SafeConfigParser
#------------------------------------------------------------
# Select only!!!!!!
# this func should be called like:
# db_call('fn_your_function','field_a','field_b')
# or insert / update statement.
#------------------------------------------------------------
def db_call(self, cfgFile, query):
    parser = SafeConfigParser()
    parser.read(cfgFile)

    dbType = parser.get('database', 'db_type')
    db_host = parser.get('database', 'db_host')
    db_name = parser.get('database', 'db_name')
    db_user = parser.get('database', 'db_login')
    db_pass = parser.get('database', 'db_pass')

    con = MySQLdb.connect(host=db_host, db=db_name,
                          user=db_user, passwd=db_pass)
    cur = con.cursor()
    try:
        cur.callproc(query[0], (query[1], query[2]))
        data = cur.fetchall()
        resultList = []
        for data_out in data:
            resultList.append(data_out)
        return resultList
        con.close()
    except MySQLdb.Error, e:
        con.rollback()
        print "Error "
        print e.args
        sys.exit(1)
I'm not sure if this is the right place to ask this, but before voting to close it (if that's the case), please reply with information on where I could ask this kind of question :)
Thanks in advance.
If your goal is to abstract away the schema of your DB from your objects' implementations, you should probably be looking at ORMs/persistence frameworks. There are a number of them in Python. As examples, SQLAlchemy is popular and Django, a popular web framework, has one built in.
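To make the suggestion concrete, here is a minimal sketch with SQLAlchemy (1.4+ style); the Domain class, its columns, and the connection URL are made up for illustration, the point being that the SQL stays behind mapped classes rather than inside the calling code:
from sqlalchemy import create_engine, Column, Integer, String
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Domain(Base):
    __tablename__ = "dns"              # hypothetical table for illustration
    id = Column(Integer, primary_key=True)
    domain = Column(String(255))

engine = create_engine("mysql://user:password@localhost/test")

with Session(engine) as session:
    session.add(Domain(domain="example.com"))   # insert without hand-written SQL
    session.commit()
    rows = session.query(Domain).all()          # select without hand-written SQL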