I would like some advice on why this would not insert data into my SQL table.
I am receiving this error:
query() argument 1 must be string or read-only buffer, not tuple.
I am not sure what the problem is, even after trying to change my code:
def insert_popularity(Category, filename, cursor):
    txt_file = file(filename, 'r')
    for line in txt_file:
        # Split the line on whitespace
        number, value = line.split()
        # construct the SQL statement
        sql = ("""INSERT INTO popularity (PersonNumber, Category, Value)
                  VALUES(%s, %s, %s)""", (number, Category, value))
        # execute the query
        cursor.execute(sql)
connection = MySQLdb.connect(host='localhost', user='root',
                             passwd='password', db='dogs')
cursor = connection.cursor()
Category = 'dogs'
insert_popularity(Category, 'dogs.txt', cursor)
connection.commit()
cursor.close()
connection.close()
I tried to update multiple rows (approx. 350000) with a single query by implementing the following function:
def update_items(rows_to_update):
    sql_query = """UPDATE contact as t SET
                       name = e.name
                   FROM (VALUES %s) AS e(id, name)
                   WHERE e.id = t.id;"""
    conn = get_db_connection()
    cur = conn.cursor()
    psycopg2.extras.execute_values(
        cur, sql_query, rows_to_update, template=None, page_size=100
    )
When I ran the function above, only 31 records were updated. I then tried to update the rows one by one with the following function:
def update_items_row_by_row(rows_to_update):
    sql_query = """UPDATE contact SET name = %s WHERE id = %s"""
    conn = get_db_connection()
    with tqdm(total=len(rows_to_update)) as pbar:
        for id, name in rows_to_update:
            cur = conn.cursor()
            # execute the UPDATE statement
            cur.execute(sql_query, (name, id))
            # commit the changes to the database
            conn.commit()
            cur.close()
            pbar.update(1)
The latter has updated all the records so far, but it is very slow (estimated to finish in 9 hours).
Does anyone know an efficient way to update multiple records?
Splitting the list into chunks of size equal to page_size worked well:
def update_items(rows_to_update):
    sql_query = """UPDATE contact as t SET
                       name = data.name
                   FROM (VALUES %s) AS data (id, name)
                   WHERE t.id = data.id"""
    conn = get_db_connection()
    cur = conn.cursor()
    n = 100
    with tqdm(total=len(rows_to_update)) as pbar:
        for i in range(0, len(rows_to_update), n):
            psycopg2.extras.execute_values(
                cur, sql_query, rows_to_update[i:i + n], template=None, page_size=n
            )
            conn.commit()
            pbar.update(cur.rowcount)
    cur.close()
    conn.close()
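For reference, execute_values expands each tuple in rows_to_update into one row of the VALUES list, so the function above expects a list of (id, name) pairs. A minimal usage sketch (the example data is assumed, not from the original post):
# Hypothetical example data: (id, name) pairs matching AS data (id, name)
rows_to_update = [
    (1, 'Alice'),
    (2, 'Bob'),
    (3, 'Carol'),
]
update_items(rows_to_update)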
The problem with your original function appears to be that you forgot to call commit. When you execute an insert/update query with psycopg2, a transaction is opened but not finalized until commit() is called. See my edits in your function (towards the bottom).
def update_items(rows_to_update):
    sql_query = """UPDATE contact as t SET
                       name = e.name
                   FROM (VALUES %s) AS e(id, name)
                   WHERE e.id = t.id;"""
    conn = get_db_connection()
    cur = conn.cursor()
    psycopg2.extras.execute_values(cur, sql_query, rows_to_update)
    ## solution below ##
    conn.commit()  # <- We MUST commit to reflect the inserted data
    cur.close()
    conn.close()
    return "success :)"
If you don't want to call conn.commit() each time you create a new cursor, you can enable autocommit, e.g.:
conn = get_db_connection()
conn.set_session(autocommit=True)
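Another option (a minimal sketch, assuming psycopg2 2.5+ and the same get_db_connection helper as in the question) is to use the connection and cursor as context managers; the with conn: block commits on a clean exit and rolls back on an exception, although it does not close the connection:
import psycopg2.extras

def update_items(rows_to_update):
    sql_query = """UPDATE contact AS t SET
                       name = e.name
                   FROM (VALUES %s) AS e(id, name)
                   WHERE e.id = t.id;"""
    conn = get_db_connection()  # assumed helper from the question
    with conn:                  # commits on success, rolls back on error
        with conn.cursor() as cur:
            psycopg2.extras.execute_values(cur, sql_query, rows_to_update)
    conn.close()                # "with conn" does not close the connection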
When I execute the following code, I get this error:
Failed to insert data into sqlite table: no such table: users
user_id = uuid.uuid4()
save_record.name = name
save_record.score = score
last_time = datetime.now()
try:
    conn = sqlite3.connect('OnlineJeopardy.db')
    cursor = conn.cursor()
    print("Successfully Connected to OnlineJeopardy DB.")
    sqlite_insert_query = """INSERT INTO users
                             (User_id, Name, Score, Last_time)
                             VALUES
                             (?, ?, ?, ?)"""
    add_to_db = cursor.execute(sqlite_insert_query, (user_id, name, score, last_time))
    conn.commit()
    cursor.close()
except sqlite3.Error as error:
    print("Failed to insert data into sqlite table: ", error)
finally:
    if (conn):
        conn.close()
        print("The SQLite connection is closed")
When I execute this query in DB Browser, with actual values instead of placeholders, it all goes well.
I've tried swapping placeholders to actual values (as below) within the query in sqlite3 but the outcome was the same.
conn = sqlite3.connect('OnlineJeopardy.db')
cursor = conn.cursor()
print("Successfully Connected to OnlineJeopardy DB.")
sqlite_insert_query = """INSERT INTO users
(User_id, Name, Score, Last_time)
VALUES
('ID-1337', 'Adam', 20, '2020-06-12 23:18:58')"""
add_to_db = cursor.execute(sqlite_insert_query)
conn.commit()
cursor.close()
I run this code but it doesn't commit anything.
def them_mon(self):
    ten_mon = ['Tin học', 'Toán', 'Nhạc', 'Mỹ thuật', 'Sinh', 'Lý', 'Văn', 'Thể dục', 'Sử', 'Địa', 'GDCD', 'TTH', 'AVTH', 'KHKT']
    len_tm = len(ten_mon)
    i = 0
    while i < len_tm:
        ten = ten_mon[i]
        # print(ten)
        sql = "INSERT INTO bang_diem(TEN_MON) VALUES(?)"
        self.conn.execute(sql, (ten,))
        i += 1
    self.conn.commit()
No record is added to bang_diem at all.
You have to execute with a cursor object, not the connection object:
# Creates or opens a DB
db = sqlite3.connect('data.db')
# Get a cursor object
cursor = db.cursor()
cursor.execute("INSERT INTO table_name (column1, column2) VALUES (?, ?)", (column1, column2))
db.commit()
I am trying to import data from Excel into MySQL; my code is below. The problem is that it only writes the last row from my Excel sheet to the MySQL db, and I want it to import all the rows from the sheet.
import pymysql
import xlrd

book = xlrd.open_workbook('C:\SqlExcel\Backup.xlsx')
sheet = book.sheet_by_index(0)

# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             db='test')

cursor = connection.cursor()

query = """INSERT INTO report_table (FirstName, LastName) VALUES (%s, %s)"""

for r in range(1, sheet.nrows):
    fname = sheet.cell(r,1).value
    lname = sheet.cell(r,2).value

values = (fname, lname)
cursor.execute(query, values)

connection.commit()
cursor.close()
connection.close()
Your code is currently only storing the last pair and writing that to the database. You need to assign fname and lname inside the loop and write each pair separately to the database.
You can amend your code to this:
import pymysql
import xlrd

book = xlrd.open_workbook('C:\SqlExcel\Backup.xlsx')
sheet = book.sheet_by_index(0)

# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             db='test',
                             autocommit=True)

cursor = connection.cursor()

query = """INSERT INTO report_table (FirstName, LastName) VALUES (%s, %s)"""

# loop over each row
for r in range(1, sheet.nrows):
    # extract each cell
    fname = sheet.cell(r,1).value
    lname = sheet.cell(r,2).value
    # extract cells into pair
    values = fname, lname
    # write pair to db
    cursor.execute(query, values)

# close everything
cursor.close()
connection.close()
Note: You can set autocommit=True in the connect phase; PyMySQL disables autocommit by default. With autocommit enabled you don't have to call connection.commit() after your query.
Your values assignment has to be inside the for loop, like this:
import pymysql
import xlrd

book = xlrd.open_workbook('C:\SqlExcel\Backup.xlsx')
sheet = book.sheet_by_index(0)

# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             db='test')

cursor = connection.cursor()

query = """INSERT INTO report_table (FirstName, LastName) VALUES (%s, %s)"""

for r in range(1, sheet.nrows):
    fname = sheet.cell(r,1).value
    lname = sheet.cell(r,2).value
    values = (fname, lname)
    cursor.execute(query, values)

connection.commit()
cursor.close()
connection.close()
Sorry, I don't know much about databases, nor about pymysql, but assuming all the rest is correct, I guess it could work like this:
...
cursor = connection.cursor()

query = """INSERT INTO report_table (FirstName, LastName) VALUES (%s, %s)"""

for r in range(1, sheet.nrows):
    fname = sheet.cell(r,1).value
    lname = sheet.cell(r,2).value
    values = (fname, lname)
    cursor.execute(query, values)

connection.commit()
cursor.close()
connection.close()
Is this something you will do on a regular basis? I see the script you're writing but I am not sure if this is something you need to run over and over again or if you are just importing the data into MySQL once.
If this is a one-shot deal, you can try this.
Open the spreadsheet, SELECT ALL and COPY all your data. Paste it into a text file and save it (let's say as c:\temp\exceldata.txt). You can then load it all into the table with one command:
LOAD DATA INFILE 'c:/temp/exceldata.txt'
INTO TABLE report_table
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\r\n'
IGNORE 1 LINES;
I am making a few assumptions here:
The spreadsheet has only two columns and they are in the same order as the fields in your table.
You do NOT need to clear out the table before the load. If you do, issue the command TRUNCATE TABLE report_table; before the load.
Note, I chose a tab-delimited format because I prefer it. You could save the file as a .CSV file and adjust the command as follows:
LOAD DATA INFILE 'c:/temp/exceldata.txt'
INTO TABLE report_table
FIELDS TERMINATED BY ','
OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\r\n'
IGNORE 1 LINES;
The "optionally enclosed by" is there because Excel will put quotes around text data with a comma in it.
If you need to do this on a regular basis, you can still use the CSV method by writing an Excel script that saves the file to a .CSV copy whenever the spreadsheet is saved. I have done that too.
I have never written Python, but this is how I do it in PHP.
HTH
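Since the rest of this thread is in Python, here is a rough Python equivalent of the "save as tab-delimited text" step (a sketch only; it assumes the openpyxl package, which is not used elsewhere in this thread, and the paths from the examples above):
import csv
from openpyxl import load_workbook

# Paths taken from the examples above; adjust to your own setup
wb = load_workbook(r'C:\SqlExcel\Backup.xlsx', read_only=True)
sheet = wb.active

# Write tab-delimited rows; csv's default \r\n line endings match the
# LINES TERMINATED BY '\r\n' clause in the LOAD DATA command above
with open(r'c:\temp\exceldata.txt', 'w', newline='', encoding='utf-8') as out:
    writer = csv.writer(out, delimiter='\t')
    for row in sheet.iter_rows(values_only=True):
        writer.writerow(row)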
This code worked for me after taking help from the suggestions above; the error was one of indentation, and now it's working :)
import pymysql
import xlrd

book = xlrd.open_workbook('C:\SqlExcel\Backup.xlsx')
sheet = book.sheet_by_index(0)

# Connect to the database
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             db='test',
                             autocommit=True)

cursor = connection.cursor()

query = """INSERT INTO report_table (FirstName, LastName) VALUES (%s, %s)"""

for r in range(1, sheet.nrows):
    fname = sheet.cell(r,1).value
    lname = sheet.cell(r,2).value
    values = (fname, lname)
    cursor.execute(query, values)

cursor.close()
connection.close()
Following is my code:
import MySQLdb

def insert_popularity(PersonNumber, Category, Value):
    # make a connection to the database
    connection = MySQLdb.connect(host='localhost', user='root',
                                 passwd='password', db='inb104')
    # get a cursor on the database
    cursor = connection.cursor()
    # construct the SQL statement
    sql = ("""INSERT INTO popularity (PersonNumber, Category, Value)
              VALUES(%s, %s, %s)""", (number, category, data))

def open_file(filename):
    txt_file = file(filename, 'r')
    for line in txt_file:
        # Split the line on whitespace
        for value in line.split():
            return value
        number = value[0]
        data = value[1]
    # execute the query
    cursor.execute(sql)
    # commit the changes to the database
    connection.commit()
    # close the cursor and connection
    cursor.close()
    connection.close()
Update:
After changing my code as per Paulo's suggestion, I now get this error:
query() argument 1 must be string or read-only buffer, not tuple.
I am not sure what the problem is, even after trying to change my code:
def insert_popularity(Category, filename, cursor):
    txt_file = file(filename, 'r')
    for line in txt_file:
        # Split the line on whitespace
        number, value = line.split()
        # construct the SQL statement
        sql = ("""INSERT INTO popularity (PersonNumber, Category, Value)
                  VALUES(%s, %s, %s)""", (number, Category, value))
        # execute the query
        cursor.execute(sql)
connection = MySQLdb.connect(host='localhost', user='root',
                             passwd='password', db='dogs')
cursor = connection.cursor()
Category = 'dogs'
insert_popularity(Category, 'dogs.txt', cursor)
connection.commit()
cursor.close()
connection.close()
Just do it simply, one thing at a time, with no fancy stuff that is error-prone and slows the reader down while they navigate the obfuscation:
sql = """INSERT INTO popularity (PersonNumber, Category, Value) VALUES (%s, %s, %s)"""
args = (number, Category, value)
cursor.execute(sql, args)
Your comment (execute the query) went away because (a) it was wrong (insert != query) and (b) the fixed version (execute the insertion) would be quite redundant given the clarity of the fixed code.
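If the file is large, one possible refinement (my sketch, not part of the original answer; it assumes the same txt_file, Category and cursor as in the question's function) is to collect the rows first and let the driver loop with cursor.executemany, which the Python DB-API provides on MySQLdb cursors:
sql = """INSERT INTO popularity (PersonNumber, Category, Value) VALUES (%s, %s, %s)"""
rows = []
for line in txt_file:
    number, value = line.split()
    rows.append((number, Category, value))
# executemany runs the same INSERT once per parameter tuple
cursor.executemany(sql, rows)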
Update after new problem (too many values to unpack):
Instead of this code:
for line in txt_file:
    # Split the line on whitespace
    number, value = line.split()
do this:
for lino, line in enumerate(txt_file, 1):
    pieces = line.split()
    if len(pieces) != 2:
        print "Bad data in line %d: %r" % (lino, pieces)
        continue
    number, value = pieces
You've created the query to execute as a tuple. There are two possibilities to solve this:
Unpack the created query (sql) into the arguments of execute:
sql = ("""INSERT INTO popularity (PersonNumber, Category, Value)
VALUES(%s, %s, %s)""", (number, Category, value))
# execute the query
cursor.execute(*sql)
Pass the query and its parameters directly to the execute method:
cursor.execute("""INSERT INTO popularity (PersonNumber, Category, Value)
VALUES(%s, %s, %s)""", (number, Category, value))
Number 2 is definitely a better option than the first one. Thanks for all the comments!
What are the data types of the number, category, and data? If any of these are strings, then you should wrap them in single quotes in your query.
Do not take this the wrong way, but the code is very messed up...
the return inside the for loop will return the first split string of the first line.
open_file is defined but never called
and so on...
My take would be something like:
def process_file(category, filename, cursor):
    sql = """INSERT INTO popularity (PersonNumber, Category, Value)
             VALUES(%s, %s, %s)"""
    txt_file = file(filename, 'r')
    for line in txt_file:
        number, value = line.split()
        # pass the parameters to execute instead of building a tuple
        cursor.execute(sql, (number, category, value))

connection = MySQLdb.connect(host='localhost', user='root',
                             passwd='password', db='inb104')
# get a cursor on the database
cursor = connection.cursor()

category = 'foo'
process_file(category, 'somefile.txt', cursor)

# commit the changes to the database
connection.commit()
# close the cursor and connection
cursor.close()
connection.close()
Try it like this:
def insert_popularity(Category, filename, cursor):
    sql = """INSERT INTO popularity (PersonNumber, Category, Value)
             VALUES(%s, %s, %s)"""
    txt_file = file(filename, 'r')
    for line in txt_file:
        # Split the line on whitespace
        number, value = line.split()
        # execute the query
        cursor.execute(sql, (number, Category, value))
    txt_file.close()
connection = MySQLdb.connect(host='localhost', user='root',
                             passwd='password', db='dogs')
cursor = connection.cursor()
Category = 'dogs'
insert_popularity(Category, 'dogs.txt', cursor)
connection.commit()
cursor.close()
connection.close()
Also note: your code suggests this is a MySQL database; if it's an SQLite database, like the title of your question says, please substitute '?' for every '%s' in the sql statement.
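For completeness, the same insert against an SQLite database would look roughly like this (a sketch only; the dogs.db file name and the assumption that a matching popularity table already exists there are mine, not from the original post):
import sqlite3

def insert_popularity(category, filename, cursor):
    # sqlite3 uses ? placeholders instead of %s
    sql = """INSERT INTO popularity (PersonNumber, Category, Value)
             VALUES (?, ?, ?)"""
    with open(filename, 'r') as txt_file:
        for line in txt_file:
            number, value = line.split()
            cursor.execute(sql, (number, category, value))

connection = sqlite3.connect('dogs.db')  # hypothetical database file
cursor = connection.cursor()
insert_popularity('dogs', 'dogs.txt', cursor)
connection.commit()
cursor.close()
connection.close()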