Problem with inserting into MySQL database from Python - python

I am having trouble inserting a record into a MySQL database from python. This is what I am doing.
def testMain2():
    conn = MySQLdb.connect(charset='utf8', host="localhost", user="root", passwd="root", db="epf")
    cursor = conn.cursor()
    tableName = "test_table"
    columnsDef = "(export_date BIGINT, storefront_id INT, genre_id INT, album_id INT, album_rank INT)"
    exStr = """CREATE TABLE %s %s""" % (tableName, columnsDef)
    cursor.execute(exStr)
    # Escape the record
    values = ["1305104402172", "12", "34", "56", "78"]
    values = [conn.literal(aField) for aField in values]
    stringList = "(%s)" % (", ".join(values))
    columns = "(export_date, storefront_id, genre_id, album_id, album_rank)"
    insertStmt = """INSERT INTO %s %s VALUES %s""" % (tableName, columns, stringList)
    cursor.execute(insertStmt)
    cursor.close()
    conn.close()
The table is created, but nothing ends up in it. I can run the INSERT statement successfully in Terminal with the same credentials.
Any suggestions on what I may be doing wrong?

You haven't committed the transaction.
conn.commit()
(The MySQLdb library sets autocommit to False when connecting to MySQL. This means that you need to manually call commit or your changes will never make it into the database.)
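Applied to the function above, the end of testMain2 would look like this (a minimal sketch of the fix):
    cursor.execute(insertStmt)
    conn.commit()  # flush the pending INSERT; otherwise it is rolled back when the connection closes
    cursor.close()
    conn.close()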

Related

how to UPDATE SQL database dynamically using python?

import mysql.connector
db = mysql.connector.connect(host="localhost", user="1234", passwd="1234", database="books1")
mycursor = db.cursor()
label = '202'
position = 'A1'
sql2 = """INSERT INTO info (label, position) VALUES (%s, %s)"""
db2 = (label, position)
mycursor.execute(sql2, db2)
print("Updated")
When I run the code it says "Updated", but when I check and refresh my SQL database the old table still appears.
How do I get my latest input data into the database?
You have to commit the transaction.
Add db.commit() after mycursor.execute()
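For example, the end of the script above would become (a minimal sketch):
mycursor.execute(sql2, db2)
db.commit()  # persist the INSERT so it is visible outside this connection
print("Updated")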

How to insert a list in postgres through python

I have a list
A=[1,2,3,3,4,5,6,8,90,8,6,5]
I want to put this list into a postgres table
After making a cursor and connection
I tried this
for i in A:
    cursor.execute("Insert into schema1.table1 Values (%s)", i)
    connection.commit()
But getting an error
TypeError: Not all arguments converted during string formatting.
Can someone help me out please
Use the function below; just make sure to change TABLE_NAME and the columns to the ones you will be inserting:
import psycopg2

def bulkInsert(records):
    try:
        connection = psycopg2.connect(user="sysadmin",
                                      password="pynative##29",
                                      host="127.0.0.1",
                                      port="5432",
                                      database="postgres_db")
        cursor = connection.cursor()
        # Here replace the table and the columns
        sql_insert_query = """ INSERT INTO TABLE_NAME (id, model, price)
                               VALUES (%s,%s,%s) """
        # executemany() to insert multiple rows
        result = cursor.executemany(sql_insert_query, records)
        connection.commit()
        print(cursor.rowcount, "Record inserted successfully into mobile table")
    except (Exception, psycopg2.Error) as error:
        print("Failed inserting record into mobile table {}".format(error))
    finally:
        # closing database connection.
        if connection:
            cursor.close()
            connection.close()
            print("PostgreSQL connection is closed")

# Example of how to use the function
records_to_insert = [(4, 'LG', 800), (5, 'One Plus 6', 950)]
bulkInsert(records_to_insert)
The 2nd argument to cursor.execute() should be a tuple. Try:
for i in A:
    cursor.execute("Insert into schema1.table1 Values (%s)", (i,))
    connection.commit()
A point worth noting from the documentation:
For positional variables binding, the second argument must always be a sequence, even if it contains a single variable (remember that Python requires a comma to create a single element tuple):
>>> cur.execute("INSERT INTO foo VALUES (%s)", "bar") # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar")) # WRONG
>>> cur.execute("INSERT INTO foo VALUES (%s)", ("bar",)) # correct
>>> cur.execute("INSERT INTO foo VALUES (%s)", ["bar"]) # correct
The correct version for your case would be:
for i in A:
    cursor.execute("Insert into schema1.table1 Values (%s)", (i,))
    connection.commit()
Using psycopg2 Fast execution helpers:
import psycopg2
from psycopg2.extras import execute_batch, execute_values

con = psycopg2.connect("dbname=test user=postgres host=localhost port=5432")
cur = con.cursor()

A = [1, 2, 3, 3, 4, 5, 6, 8, 90, 8, 6, 5]
param_list = [[id] for id in A]

# Using execute_batch
sql = "Insert into public.table1 values (%s)"
execute_batch(cur, sql, param_list)
con.commit()

# Using execute_values
sql = "Insert into public.table1 values %s"
execute_values(cur, sql, param_list)
con.commit()

How can I let the id be auto generated in my database?

Here is my code:
import sqlite3

def insert(fields=(), values=()):
    connection = sqlite3.connect('database.db')
    # g.db is the database connection
    cur = connection.cursor()
    query = 'INSERT INTO this_database (%s) VALUES (%s)' % (
        ', '.join(fields),
        ', '.join(['?'] * len(values))
    )
    cur.execute(query, values)
    connection.commit()
    id = cur.lastrowid
    cur.close()
    print(id)
test example:
insert(fields = ("id", "file_name", "url", "time", "type", "description"), values = (2, "file1", "wwww.test.com", "1", "photo", "my first database test"))
I don't want to give the id manually.
I want it to add it+1 automatically.
How can I do that?
You have an INTEGER PRIMARY KEY column, which, if you leave it out when inserting items, automatically increments:
INSERT INTO this_database(file_name, url, time, type, description)
VALUES (?,?,?,?,?)
Since id is omitted, every time you insert a value using the above statement, it's automatically assigned a number by sqlite.
The documentation explains this in more detail.
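Applied to the insert() helper from the question, that just means dropping "id" from both tuples (a sketch, assuming the id column is declared INTEGER PRIMARY KEY):
insert(fields=("file_name", "url", "time", "type", "description"),
       values=("file1", "wwww.test.com", "1", "photo", "my first database test"))
# cur.lastrowid then reports the id that sqlite generated for the new row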

python MySQL update specific column fetchall()

I'm new to Python and I want to update every record that has count 0 in the database. I have tried a lot but can't find anything that helps.
for row in cur.fetchall():
    if row[3] == 0:
        cur.execute("UPDATE tble SET count = 1 WHERE name = %s" % row[1])
Assuming your table has this structure:
CREATE TABLE `test` (
  `sno` int(11) NOT NULL,
  `name` varchar(50) NOT NULL,
  `count` int(11) NOT NULL,
  `dtCreated` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP
);
Here is the simple code:
import pymysql

conn = pymysql.connect(host='localhost', unix_socket='', user='USER', passwd='PASSWORD', db='DATABASENAME')
cur = conn.cursor()
cur.execute("SELECT * FROM test")
for r in cur:
    curr = conn.cursor()
    sql = """UPDATE test SET count = 1 WHERE name = '%s'""" % r[1]
    # print(sql)
    try:
        # Execute the SQL command
        curr.execute(sql)
        # Commit your changes in the database
        conn.commit()
    except:
        # Rollback in case there is any error
        conn.rollback()
    curr.close()
cur.close()
conn.close()
Also, since you mentioned that you are new to Python: remember to commit every time you run an INSERT, UPDATE or DELETE query.
Hope it helps.
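As a side note (a sketch, not part of the answer above), the same UPDATE can be written as a parameterized query so the driver handles the quoting itself:
sql = "UPDATE test SET count = 1 WHERE name = %s"
curr.execute(sql, (r[1],))  # pymysql substitutes and escapes the value
conn.commit()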

psycopg2 postgres database syntax error near value

I am trying to insert info from a pandas DataFrame into a database table by using a function that I wrote:
def insert(table_name="", name="", genere="", year=1, impd_rating=float(1)):
    conn = psycopg2.connect("dbname='database1' user='postgres' password='postgres333' host='localhost' port=5433 ")
    cur = conn.cursor()
    cur.execute("INSERT INTO %s VALUES %s,%s,%s,%s" % (table_name, name, genere, year, impd_rating))
    conn.commit()
    conn.close()
When I try to use this function like this:
b = 0
for row in DF['id']:
    insert(impd_rating=float(DF['idbm_rating'][b]),
           year=int(DF['year'][b]),
           name=str(DF['name'][b]),
           genere=str(DF['genere'][b]),
           table_name='test_movies')
    b = b + 1
I get the following syntax error:
PS D:\tito\scripts\database training> python .\postgres_script.py
Traceback (most recent call last):
  File ".\postgres_script.py", line 56, in <module>
    insert(impd_rating=float(DF['idbm_rating'][b]), year=int(DF['year'][b]), name=str(DF['name'][b]), genere=str(DF['genere'][b]), table_name='test_movies')
  File ".\postgres_script.py", line 15, in insert
    cur.execute("INSERT INTO %s VALUES %s,%s,%s,%s" % (table_name, name, genere, year, impd_rating))
psycopg2.ProgrammingError: syntax error at or near "Avatar"
LINE 1: INSERT INTO test_movies VALUES Avatar,action,2009,7.9
I also tried to change the str replacement method from %s to .format()
but I had the same error.
The error message is explicit: this SQL command is wrong at Avatar: INSERT INTO test_movies VALUES Avatar,action,2009,7.9. Values must be enclosed in parentheses, and character strings must be quoted, so the correct SQL is:
INSERT INTO test_movies VALUES ('Avatar', 'action', 2009, 7.9)
But building a full SQL command by concatenating parameters is bad practice (*); only the table name should be inserted directly into the command, because it is not a SQL parameter. The correct way is to use a parameterized query (psycopg2 uses %s placeholders, so they have to be doubled when the table name is merged in with the % operator):
query = "INSERT INTO %s VALUES (%%s, %%s, %%s, %%s)" % (table_name,)
cur.execute(query, (name, genere, year, impd_rating))
(*) It was the cause of numerous SQL injection flaws, because if one of the parameters contains a semicolon (;), what comes after it could be interpreted as a new command.
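For illustration (a hypothetical value, not from the question's data):
# Hypothetical malicious input, for illustration only
name = "x; DROP TABLE test_movies; --"
# String formatting pastes it straight into the command text:
#   INSERT INTO test_movies VALUES x; DROP TABLE test_movies; --,action,2009,7.9
# so everything after the semicolon is sent to the server as a second statement.
# Passed as a query parameter instead, the value stays plain data.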
Pandas has a DataFrame method for this, to_sql. Note that to_sql expects a SQLAlchemy engine (or a sqlite3 connection), not a raw psycopg2 connection:
from sqlalchemy import create_engine

# Only needs to be executed once.
engine = create_engine('postgresql://postgres:postgres333@localhost:5433/database1')
df.to_sql('test_movies', con=engine, if_exists='append', index=False)
This should hopefully get you going in the right direction.
In your original query
INSERT INTO %s VALUES %s,%s,%s,%s
there is an SQL problem: you need parentheses around the values, i.e. it should be VALUES (%s, %s, %s, %s). On top of that, the table name cannot be merged in as a parameter, or it would be escaped as a string, which is not what you want.
You can use the psycopg 2.7 sql module to merge the table name into the query, with placeholders for the values:
from psycopg2 import sql

query = sql.SQL("INSERT INTO {} VALUES (%s, %s, %s, %s)").format(
    sql.Identifier('test_movies'))
cur.execute(query, ('Avatar', 'action', 2009, 7.9))
This makes both merging the table name and passing the arguments to the query secure.
Hello mohamed mahrous,
First install the psycopg2 package to access the PostgreSQL database.
Try the code below:
import psycopg2

conn = psycopg2.connect("dbname='database1' user='postgres' password='postgres333' host='localhost' port=5433 ")
cur = conn.cursor()

def insert(table_name, name, genere, year, impd_rating):
    query = "INSERT INTO " + table_name + " (name, genere, year, impd_rating) VALUES (%s, %s, %s, %s)"
    try:
        print(query)
        cur.execute(query, (name, genere, year, impd_rating))
    except Exception as e:
        print("Not executed...", e)
    conn.commit()

b = 0
for row in DF['id']:
    insert(impd_rating=float(DF['idbm_rating'][b]), year=int(DF['year'][b]), name=str(DF['name'][b]), genere=str(DF['genere'][b]), table_name='test_movies')
    b = b + 1
conn.close()
Example:
import psycopg2

conn = psycopg2.connect("dbname='database1' user='postgres' password='postgres333' host='localhost' port=5433 ")
cur = conn.cursor()

def insert(table_name, name, genere, year, impd_rating):
    query = "INSERT INTO " + table_name + " (name, genere, year, impd_rating) VALUES (%s, %s, %s, %s)"
    try:
        print(query)
        cur.execute(query, (name, genere, year, impd_rating))
    except Exception as e:
        print("Not executed", e)
    conn.commit()

b = 0
for row in DF['id']:
    insert(impd_rating="7.0", year="2017", name="Er Ceo Vora Mayur", genere="etc", table_name="test_movies")
    b = b + 1
conn.close()
I hope my answer is helpful.
If you have any questions, please comment.
I found a solution for my issue by using SQLAlchemy and the pandas to_sql method.
Thanks for the help, everyone.
import sqlalchemy
from sqlalchemy import *
import pandas as pd

def connect(user, password, db, host='localhost', port=5433):
    '''Returns a connection and a metadata object'''
    # We connect with the help of the PostgreSQL URL
    # postgresql://federer:grandestslam@localhost:5432/tennis
    url = 'postgresql://{}:{}@{}:{}/{}'
    url = url.format(user, password, host, port, db)
    # The return value of create_engine() is our connection object
    con = sqlalchemy.create_engine(url, client_encoding='utf8')
    # We then bind the connection to MetaData()
    meta = sqlalchemy.MetaData(bind=con, reflect=True)
    return con, meta

con, meta = connect('postgres', 'postgres333', 'database1')

movies = Table('test', meta,
               Column('id', Integer, primary_key=True),
               Column('name', String),
               Column('genere', String),
               Column('year', Integer),
               Column('idbm_rating', REAL))
meta.create_all(con)

DF = pd.read_csv('new_movies.txt', sep=' ', engine='python')
DF.columns = ('id', 'name', 'genere', 'year', 'idbm_rating')
DF.to_sql('movies', con=con, if_exists='append', index=False)
