Unable to import CSV file data to mysql table - python

I am using the code below:

import mysql.connector
import csv

mydb = mysql.connector.connect(host='xxxxx', user='xxxx', passwd='xxxxxx')
cursor = mydb.cursor()
cursor.execute("SHOW DATABASES")
l = cursor.fetchall()
cursor.execute("USE DB")
with open('details.csv', 'rt') as f:
    csv_data = csv.reader(f)
    for row in csv_data:
        cursor.execute('INSERT INTO student (id,name,age,course) VALUES (%s,%s,%s,%s)', row)
cursor.close()
mydb.commit()
mydb.close()
I am getting the following error:

"ProgrammingError: Not all parameters were used in the SQL statement"

Python 3.x, Windows 7, MySQL 2012 version.
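For what it's worth, mysql.connector raises "Not all parameters were used in the SQL statement" when the parameter sequence holds more values than there are %s placeholders, so the rows coming out of csv.reader most likely have more than four fields (a wrong delimiter is a common cause). A quick check, using the question's own file name:

with open('details.csv', 'rt') as f:
    csv_data = csv.reader(f)  # pass delimiter=';' here if the file is semicolon-separated
    for row in csv_data:
        print(len(row), row)  # each row needs exactly 4 fields for the 4 placeholders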

for row in csv_data:
    listval = []
    for col in row:
        listval.append(col)
    cursor.execute('INSERT INTO student (id,name,age,course) VALUES (%s,%s,%s,%s)', listval)

This will work :))

In this case, the main problem is the string format. I didn't know what object row is; in your error it is a list, so passing each element explicitly will solve the issue. Note that mysql.connector expects the parameters as a single sequence, so wrap them in a tuple:

for row in csv_data:
    cursor.execute('INSERT INTO student (id,name,age,course) VALUES (%s,%s,%s,%s)',
                   (row[0], row[1], row[2], row[3]))

Related

Import Data from .csv file into mysql using python

I am trying to import data from two columns of a .csv file (time hh:mm, float). I created a database and a table in mysql.
import mysql.connector
import csv

mydb = mysql.connector.connect(host='127.0.0.1',
                               user='xxx',
                               passwd='xxx',
                               db='pv_datenbank')
cursor = mydb.cursor()

# get rid of the BOM at the beginning of the .csv file
s = open('Sonneneinstrahlung.csv', mode='r', encoding='utf-8-sig').read()
open('Sonneneinstrahlung.csv', mode='w', encoding='utf-8').write(s)
print(s)

with open('Sonneneinstrahlung.csv') as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=';')
    sql = """INSERT INTO einstrahlung ('Uhrzeit', 'Einstrahlungsdaten') VALUES (%s, %s)"""
    for row in csv_reader:
        print(row)
        print(cursor.rowcount, "was inserted.")
    cursor.executemany(sql, csv_reader)
    # cursor.execute(sql, row, multi=True)

mydb.commit()
mydb.close()
If I run the program with executemany(), the result is the following:

['01:00', '1']
'-1 was inserted.'

and after this I get the error "Not all parameters were used" again.
When I try the same thing with the execute() operator, no error is shown, but the data is not inserted into the table of my database.
executemany() takes a statement and a sequence of parameter sets, so you can hand it the reader itself; in your code the for loop has already consumed the reader by the time executemany() runs. Note also that the quotes around the column names are removed below, since quoted strings are not valid column identifiers in MySQL.

Try this:

with open('Sonneneinstrahlung.csv') as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=';')
    sql = """INSERT INTO einstrahlung (Uhrzeit, Einstrahlungsdaten) VALUES (%s, %s)"""
    cursor.executemany(sql, csv_reader)
    mydb.commit()

Inserting csv into MySQL database with python library mysql.connector

I am having trouble inserting CSV data into a MySQL table with mysql.connector.
The code I use looks like this:

import mysql.connector
import csv

andreport = 'testowa.csv'
cnx = mysql.connector.connect(
    user='xxxxx',
    password='xxxxx',
    host='xxxxxx',
    database='xxxxx')
cursor = cnx.cursor()

with open(andreport, 'r') as csv_data:
    for row in csv_data:
        cursor.execute(
            "INSERT INTO flex(date, Store, Vendor, Shelf)"
            "VALUES({},{},{},{})", row)

cnx.commit()
cursor.close()
cnx.close()
print("Done")
The error I get:
C:\Users\Iw4n\PycharmProjects\Learning\venv\Scripts\python.exe C:/Users/Iw4n/PycharmProjects/Learning/Orange_android_MySQL_insertion.py
Traceback (most recent call last):
File "C:/Users/Iw4n/PycharmProjects/Learning/Orange_android_MySQL_insertion.py", line 15, in <module>
cursor.execute(
File "C:\Users\Iw4n\PycharmProjects\Learning\venv\lib\site-packages\mysql\connector\cursor.py", line 551, in execute
self._handle_result(self._connection.cmd_query(stmt))
File "C:\Users\Iw4n\PycharmProjects\Learning\venv\lib\site-packages\mysql\connector\connection.py", line 490, in cmd_query
result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query))
File "C:\Users\Iw4n\PycharmProjects\Learning\venv\lib\site-packages\mysql\connector\connection.py", line 395, in _handle_result
raise errors.get_exception(packet)
mysql.connector.errors.ProgrammingError: 1064 (42000): You have an error in your SQL syntax; check the manual that corresponds to your MySQL server version for the right syntax to use near '},{},{},{})' at line 1
When I wrapped the {} in quotes, as many rows as were in the CSV were inserted into the database, but every value was the literal string {}. The same goes for %s: used bare, it gives the same error as above, and wrapped in quotes, the literal string %s is inserted into the database.
I also found a suggestion to add an f in front of the "INSERT..." string, but it did not help.
Can anyone give me some suggestions on how to overcome this and correctly insert the data into MySQL?
Final code that is working as intended:

import mysql.connector
import csv

andreport = 'testowa.csv'
cnx = mysql.connector.connect(
    user='xxxxx',
    password='xxxxx',
    host='xxxxx',
    database='xxxxx')
cursor = cnx.cursor()

with open(andreport, mode='r') as csv_data:
    reader = csv.reader(csv_data, delimiter=';')
    csv_data_list = list(reader)
    for row in csv_data_list:
        cursor.execute("""
            INSERT INTO flex(
                date, Agency, MediaSource, Campaign)
            VALUES(%s,%s,%s,%s)""",
            (row[0], row[1], row[2], row[3]))

cnx.commit()
cursor.close()
cnx.close()
print("Done")
I'm guessing the problem is that you passed one argument (row) instead of four. Try this:

cursor.execute("""
    INSERT INTO flex(date, Store, Vendor, Shelf)
    VALUES(%s,%s,%s,%s)""", (row[0], row[1], row[2], row[3]))
Looking at the documentation for the MySQLCursor.execute() method, it seems like using %s placeholders as the parameter markers in your insert statement might fix this:

import mysql.connector
import csv

andreport = 'testowa.csv'
cnx = mysql.connector.connect(
    user='xxxxx',
    password='xxxxx',
    host='xxxxxx',
    database='xxxxx')
cursor = cnx.cursor()

insert_statement = (
    "INSERT INTO flex(date, Store, Vendor, Shelf) "
    "VALUES (%s, %s, %s, %s)"
)

with open(andreport, mode='r') as csv_data:
    reader = csv.reader(csv_data, delimiter=';')
    csv_data_list = list(reader)
    for row in csv_data_list:
        cursor.execute(insert_statement, row)

cnx.commit()
cursor.close()
cnx.close()
print("Done")
Let me know if this gets you anywhere, or if you see a new error!
Edit: updated CSV reading to convert to a list.

Incorrect integer value when insert data to mysql database python

I have sample code like this:
import csv
import MySQLdb

mydb = MySQLdb.connect(host='localhost', user='root', passwd='root', db='marbola')
cursor = mydb.cursor()

with open('./Team_Attributes.csv') as csv_data:
    csv_reader = csv.reader(csv_data)
    next(csv_reader, None)  # skip the header row
    for row in csv_reader:
        insert = (
            "INSERT INTO Team_Attributes(id,team_fifa_api_id,team_api_id,date,buildUpPlaySpeed,buildUpPlaySpeedClass,buildUpPlayDribbling,buildUpPlayDribblingClass,buildUpPlayPassing,buildUpPlayPassingClass,buildUpPlayPositioningClass,chanceCreationPassing,chanceCreationPassingClass,chanceCreationCrossing,chanceCreationCrossingClass,chanceCreationShooting,chanceCreationShootingClass,chanceCreationPositioningClass,defencePressure,defencePressureClass,defenceAggression,defenceAggressionClass,defenceTeamWidth,defenceTeamWidthClass,defenceDefenderLineClass)"
            + " VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        )
        cursor.execute(insert, row[:25])

mydb.commit()
cursor.close()
print("Done")
I used the above code to insert data into a MySQL database from a CSV file. The CSV file was exported from an SQLite database of football analysis data from kaggle.com. But there is an error:
_mysql_exceptions.OperationalError: (1366, "Incorrect integer value: '' for column 'buildUpPlayDribbling' at row 1")
What's wrong with the code? How can I solve this error? I am using Python 3.5.
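The error itself points at the cause: the CSV stores an empty string where buildUpPlayDribbling has no value, and MySQL refuses '' as an integer. This thread has no posted answer, but a minimal sketch of the usual fix, assuming those columns are nullable, is to map empty fields to None (which MySQLdb sends as SQL NULL) before executing:

for row in csv_reader:
    # '' is not a valid integer value; None is sent to MySQL as NULL
    cleaned = [None if val == '' else val for val in row[:25]]
    cursor.execute(insert, cleaned)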

csv into sqlite table python

Using Python, I am trying to import a CSV into an SQLite table, using the headers in the CSV file as the column names in the SQLite table. The code runs, but the table "MyTable" does not appear to be created. Here is the code:
with open('dict_output.csv', 'r') as f:
    reader = csv.reader(f)
    columns = next(reader)
    # Strips white space in header
    columns = [h.strip() for h in columns]
    #reader = csv.DictReader(f, fieldnames=columns)
    for row in reader:
        print(row)
    con = sqlite3.connect("city_spec.db")
    cursor = con.cursor()
    # Inserts data from csv into table in sql database.
    query = 'insert into MyTable({0}) values ({1})'
    query = query.format(','.join(columns), ','.join('?' * len(columns)))
    print(query)
    cursor = con.cursor()
    for row in reader:
        cursor.execute(query, row)
    #cursor.commit()
    con.commit()
con.close()
Thanks in advance for any help.
You can use Pandas to make this easy (you may need to pip install pandas first):
import sqlite3
import pandas as pd
# load data
df = pd.read_csv('dict_output.csv')
# strip whitespace from headers
df.columns = df.columns.str.strip()
con = sqlite3.connect("city_spec.db")
# drop data into database
df.to_sql("MyTable", con)
con.close()
Pandas will do all of the hard work for you, including creating the actual table!
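One caveat worth adding (my note, not the original answer's): by default to_sql fails if MyTable already exists, and it also writes the DataFrame index as an extra column; both behaviours are controlled by keyword arguments:

# replace any existing table and skip the index column
df.to_sql("MyTable", con, if_exists="replace", index=False)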
You haven't marked your answer solved yet so here goes.
Connect to the database just once, and create a cursor just once.
You can read the csv records only once.
I've added code that creates a crude form of the database table based on the column names alone. Again, this is done just once, guarded by the first flag.
Your insertion code works fine.
import sqlite3
import csv

con = sqlite3.connect("city_spec.sqlite")  ## these statements belong outside the loop
cursor = con.cursor()                      ## execute them just once

first = True
with open('dict_output.csv', 'r') as f:
    reader = csv.reader(f)
    columns = next(reader)
    columns = [h.strip() for h in columns]
    if first:
        sql = 'CREATE TABLE IF NOT EXISTS MyTable (%s)' % ', '.join(['%s text' % column for column in columns])
        print(sql)
        cursor.execute(sql)
        first = False
    #~ for row in reader:  ## we will read the rows later in the loop
    #~     print(row)
    query = 'insert into MyTable({0}) values ({1})'
    query = query.format(','.join(columns), ','.join('?' * len(columns)))
    print(query)
    for row in reader:
        cursor.execute(query, row)

con.commit()
con.close()
You can also do it easily with the peewee ORM. For this you only need an extension from peewee, playhouse.csv_loader:

from playhouse.csv_loader import *

db = SqliteDatabase('city_spec.db')
Test = load_csv(db, 'dict_output.csv')

This creates the table in city_spec.db with the headers as fields and loads the data from dict_output.csv.
If you don't have peewee you can install it with
pip install peewee

Writing a csv file into SQL Server database using python

I am trying to write a CSV file into a table in a SQL Server database using Python. I am facing errors when I pass the parameters, but I don't face any error when I do it manually. Here is the code I am executing:

cur = cnxn.cursor()  # Get the cursor
csv_data = csv.reader(file('Samplefile.csv'))  # Read the csv
for rows in csv_data:  # Iterate through csv
    cur.execute("INSERT INTO MyTable(Col1,Col2,Col3,Col4) VALUES (?,?,?,?)", rows)
cnxn.commit()
Error:
pyodbc.DataError: ('22001', '[22001] [Microsoft][ODBC SQL Server Driver][SQL Server]String or binary data would be truncated. (8152) (SQLExecDirectW); [01000] [Microsoft][ODBC SQL Server Driver][SQL Server]The statement has been terminated. (3621)')
However, when I insert the values manually, it works fine:

cur.execute("INSERT INTO MyTable(Col1,Col2,Col3,Col4) VALUES (?,?,?,?)", 'A', 'B', 'C', 'D')

I have ensured that the table exists in the database and that the data types are consistent with the data I am passing. The connection and cursor are also correct. The data type of rows is "list".
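The truncation error means at least one value is longer than its column allows. Not part of the original question, but a quick hedged way to find the offending column, using the file and table names above:

import csv

with open('Samplefile.csv', 'r') as f:
    rows = list(csv.reader(f))
# longest value seen in each CSV column; compare against the
# declared varchar sizes of Col1..Col4 in MyTable
widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
print(widths)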
Consider building the query dynamically to ensure the number of placeholders matches your table and CSV file format. Then it's just a matter of ensuring your table and CSV file are correct, instead of checking that you typed enough ? placeholders in your code.
The following example assumes
CSV file contains column names in the first line
Connection is already built
File name is test.csv
Table name is MyTable
Python 3
...
with open('test.csv', 'r') as f:
    reader = csv.reader(f)
    columns = next(reader)
    query = 'insert into MyTable({0}) values ({1})'
    query = query.format(','.join(columns), ','.join('?' * len(columns)))
    cursor = connection.cursor()
    for data in reader:
        cursor.execute(query, data)
    cursor.commit()
If column names are not included in the file:
...
with open('test.csv', 'r') as f:
    reader = csv.reader(f)
    data = next(reader)
    query = 'insert into MyTable values ({0})'
    query = query.format(','.join('?' * len(data)))
    cursor = connection.cursor()
    cursor.execute(query, data)
    for data in reader:
        cursor.execute(query, data)
    cursor.commit()
I modified the code written above by Brian as follows, since the version posted above wouldn't work on the delimited files that I was trying to upload. The line row.pop() can be ignored; it was only necessary for the particular set of files I was trying to upload.
import csv

def upload_table(path, filename, delim, cursor):
    """
    Function to upload flat file to sqlserver
    """
    tbl = filename.split('.')[0]
    cnt = 0
    with open(path + filename, 'r') as f:
        reader = csv.reader(f, delimiter=delim)
        for row in reader:
            row.pop()  # can be commented out
            row = ['NULL' if val == '' else val for val in row]
            row = [x.replace("'", "''") for x in row]
            out = "'" + "', '".join(str(item) for item in row) + "'"
            out = out.replace("'NULL'", 'NULL')
            query = "INSERT INTO " + tbl + " VALUES (" + out + ")"
            cursor.execute(query)
            cnt = cnt + 1
            if cnt % 10000 == 0:
                cursor.commit()
        cursor.commit()
    print("Uploaded " + str(cnt) + " rows into table " + tbl + ".")
You can pass the columns as arguments. For example:
for rows in csv_data:  # Iterate through csv
    cur.execute("INSERT INTO MyTable(Col1,Col2,Col3,Col4) VALUES (?,?,?,?)", *rows)
If you are using MySqlHook in Airflow and cursor.execute() with params throws an error

TypeError: not all arguments converted during string formatting

use %s instead of ?:

with open('/usr/local/airflow/files/ifsc_details.csv', 'r') as csv_file:
    csv_reader = csv.reader(csv_file)
    columns = next(csv_reader)
    query = '''insert into ifsc_details({0}) values({1});'''
    query = query.format(','.join(columns), ','.join(['%s'] * len(columns)))
    mysql = MySqlHook(mysql_conn_id='local_mysql')
    conn = mysql.get_conn()
    cursor = conn.cursor()
    for data in csv_reader:
        cursor.execute(query, data)
    conn.commit()  # commit on the connection; MySQLdb cursors have no commit()
I got it sorted out. The error was due to the size restriction of the table: I changed the column capacity, e.g. from col1 varchar(10) to col1 varchar(35), and now it's working fine.
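For reference, a hedged sketch of that kind of change, run through the question's own cursor (the names and sizes are just the examples from this answer; SQL Server uses ALTER COLUMN for this):

# widen the column so the CSV values fit
cur.execute("ALTER TABLE MyTable ALTER COLUMN Col1 varchar(35)")
cnxn.commit()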
Here is a script that I hope works for you:

import pandas as pd
import pyodbc as pc

connection_string = "Driver=SQL Server;Server=localhost;Database={0};Trusted_Connection=Yes;"
cnxn = pc.connect(connection_string.format("DataBaseNameHere"), autocommit=True)
cur = cnxn.cursor()

df = pd.read_csv("your_filepath_and_filename_here.csv").fillna('')
query = 'insert into TableName({0}) values ({1})'
query = query.format(','.join(df.columns), ','.join('?' * len(df.columns)))

cur.fast_executemany = True  # lets pyodbc send the parameter sets in bulk
cur.executemany(query, df.values.tolist())
cnxn.close()
You can also import data into SQL by using either:
The SQL Server Import and Export Wizard
SQL Server Integration Services (SSIS)
The OPENROWSET function
More details can be found on this webpage:
https://learn.microsoft.com/en-us/sql/relational-databases/import-export/import-data-from-excel-to-sql?view=sql-server-2017
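If the file is readable by the SQL Server machine itself, the server-side route can also be driven from Python. A sketch that is mine, not the answer's, using the standard T-SQL BULK INSERT statement (the path, table, and connection details are hypothetical):

import pyodbc

cnxn = pyodbc.connect(
    "Driver=SQL Server;Server=localhost;Database=DataBaseNameHere;Trusted_Connection=Yes;",
    autocommit=True)
# BULK INSERT executes on the server, so the path must be visible to SQL Server
cnxn.cursor().execute(r"""
    BULK INSERT MyTable
    FROM 'C:\data\Samplefile.csv'
    WITH (FIELDTERMINATOR = ',', ROWTERMINATOR = '\n', FIRSTROW = 2)
""")
cnxn.close()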
