Using Python with Postgres and this error pops up

Getting this error:
Connection successful
Database Created Successfully
Connected to babynames database.
Traceback (most recent call last):
File "C:/Users/Joseph/Desktop/INFO153 Final/Info153PartII.py", line 44, in <module>
cur.execute('insert into boys values ({},{},{},{});'.format(row.tolist()[0],"'"+row.tolist()[1]+"'",row.tolist()[2],row.tolist()[3]))
TypeError: can only concatenate str (not "float") to str
import pandas as pd
df_boys = pd.read_csv('babynames.txt', delimiter=' ',header=None, usecols=(0,1,2,3) )
df_boys.columns = ["Rank", "Name", "Total Babies", "Total Percentage"]
df_girls = pd.read_csv('babynames.txt', delimiter=' ',header=None, usecols=(0,4,5,6) )
df_girls.columns = ["Rank", "Name", "Total Babies", "Total Percentage"]
#check the imported data
df_boys.head()
df_girls.head()
#psycopg2 library for connecting to PostgreSQL database
import psycopg2
conn = psycopg2.connect(database="postgres", user="postgres", password="postgres", host="127.0.0.1", port="5432")
conn.autocommit = True
print ("Connection successful")
#Create babynames database
cur = conn.cursor()
cur.execute('CREATE DATABASE babynames;')
print ("Database Created Successfully")
#connect to babynames database
conn_babynames = psycopg2.connect(database="babynames", user="postgres", password="postgres", host="127.0.0.1", port="5432")
conn_babynames.autocommit = True
print ("Connected to babynames database.")
cur = conn_babynames.cursor()
#create tables for boys and girls
cur.execute('CREATE TABLE Boys (Rank integer, Name varchar(25), Total_Babies float, Total_Percentage float);')
cur.execute('CREATE TABLE Girls (Rank integer, Name varchar(25), Total_Babies float, Total_Percentage float);')
#import data from dataframe to table using insert into statement
for i, row in df_boys.iterrows():
    cur.execute('insert into boys values ({},{},{},{});'.format(row.tolist()[0],"'"+row.tolist()[1]+"'",row.tolist()[2],row.tolist()[3]))
for i, row in df_girls.iterrows():
    cur.execute('insert into girls values ({},{},{},{});'.format(row.tolist()[0],"'"+row.tolist()[1]+"'",row.tolist()[2],row.tolist()[3]))
#execute sql queries and display result of analysis
cur.execute("select sum(total) as t_children from ( select sum(Total_Babies) as total from boys union select sum(Total_Babies) as total from girls) as t")
total_babies=cur.fetchone()
print("Total babies born that year = ",total_babies[0])
cur.execute("select total from ( select sum(Total_Babies) as total from boys union select sum(Total_Babies) as total from girls) as t")
babies=cur.fetchall()
if (babies[0] > babies[1]):
    print("More boys were born in that year")
else:
    print("More girls were born in that year")
cur.execute("select name from boys where Total_Babies> {}".format(total_babies[0]/2))
babies=cur.fetchall()
print(babies)
cur.execute("select name from girls where Total_Babies> {}".format(total_babies[0]/2))
babies=cur.fetchall()
print(babies)

I believe the error is in "'"+row.tolist()[1]+"'" in your insert. As the error states, there's an issue with trying to concatenate a float to a string. You should just be able to wrap the value in str() so that everything being concatenated (+) is a string:
"'"+str(row.tolist()[1])+"'"
As some have pointed out, it is considered poor and dangerous practice to take strings from somewhere and concatenate them into your SQL queries; malicious strings can wreck your database. My answer addresses your question about why you got the error, but you would do well to look up parameterized SQL and how to implement it with psycopg2.
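For reference, here is a minimal sketch of what the parameterized form might look like, reusing the cur cursor and df_boys dataframe from the question (the same pattern applies to the girls table); the column names are the ones assigned in the question:

insert_sql = 'INSERT INTO boys VALUES (%s, %s, %s, %s);'
for _, row in df_boys.iterrows():
    # Cast to plain Python types; psycopg2 then quotes and escapes the
    # values itself, so the name string needs no manual quoting.
    params = (int(row['Rank']), str(row['Name']),
              float(row['Total Babies']), float(row['Total Percentage']))
    cur.execute(insert_sql, params)

With %s placeholders handled by psycopg2, the TypeError disappears because no string concatenation happens at all.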

Related

importing single .csv into mysql with python

When running this code I am getting "Error while connecting to MySQL: Not all parameters were used in the SQL statement".
I have also tried to ingest the data with another technique (shown as the second technique below).
import mysql.connector as msql
from mysql.connector import Error
import pandas as pd
empdata = pd.read_csv('path_to_file', index_col=False, delimiter = ',')
empdata.head()
try:
    conn = msql.connect(host='localhost', user='test345',
                        password='test123')
    if conn.is_connected():
        cursor = conn.cursor()
        cursor.execute("CREATE DATABASE timetheft")
        print("Database is created")
except Error as e:
    print("Error while connecting to MySQL", e)
try:
    conn = msql.connect(host='localhost', database='timetheft', user='test345', password='test123')
    if conn.is_connected():
        cursor = conn.cursor()
        cursor.execute("select database();")
        record = cursor.fetchone()
        print("You're connected to database: ", record)
        cursor.execute('DROP TABLE IF EXISTS company;')
        print('Creating table....')
        create_contracts_table = """
            CREATE TABLE company (
                ID VARCHAR(40) PRIMARY KEY,
                Company_Name VARCHAR(40),
                Country VARCHAR(40),
                City VARCHAR(40),
                Email VARCHAR(40),
                Industry VARCHAR(30),
                Employees VARCHAR(30)
            );
            """
        cursor.execute(create_company_table)
        print("Table is created....")
        for i, row in empdata.iterrows():
            sql = "INSERT INTO timetheft.company VALUES (%S, %S, %S, %S, %S,%S,%S,%S)"
            cursor.execute(sql, tuple(row))
            print("Record inserted")
        # the connection is not auto committed by default, so we must commit to save our changes
        conn.commit()
except Error as e:
    print("Error while connecting to MySQL", e)
The second technique I tried:
LOAD DATA LOCAL INFILE 'path_to_file'
INTO TABLE copmany
FIELDS TERMINATED BY ';'
ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES;
This worked better, but with many errors; only about 20% of the rows were ingested.
Finally, here is an excerpt from the .csv (the data is consistent throughout all 1K rows):
"ID";"Company_Name";"Country";"City";"Email";"Industry";"Employees"
217520699;"Enim Corp.";"Germany";"Bamberg";"posuere#diamvel.edu";"Internet";"51-100"
352428999;"Lacus Vestibulum Consulting";"Germany";"Villingen-Schwenningen";"egestas#lacusEtiambibendum.org";"Food Production";"100-500"
371718299;"Dictum Ultricies Ltd";"Germany";"Anklam";"convallis.erat#sempercursus.co.uk";"Primary/Secondary Education";"100-500"
676789799;"A Consulting";"Germany";"Andernach";"massa#etrisusQuisque.ca";"Government Relations";"100-500"
718526699;"Odio LLP";"Germany";"Eisenhüttenstadt";"Quisque.varius#euismod.org";"E-Learning";"11-50"
I fixed these issues to get the code to work:
- make the number of placeholders in the INSERT statement equal to the number of columns
- the placeholders should be lower-case '%s'
- the cell delimiter appears to be a semi-colon, not a comma
For simply reading a csv with ~1000 rows, pandas is overkill (and iterrows does not seem to behave as you expect). I've used the csv module from the standard library instead.
import csv
...
sql = "INSERT INTO company VALUES (%s, %s, %s, %s, %s, %s, %s)"
with open("67359903.csv", "r", newline="") as f:
    reader = csv.reader(f, delimiter=";")
    # Skip the header row.
    next(reader)
    # For large files it may be more efficient to commit
    # rows in batches.
    cursor.executemany(sql, reader)
    conn.commit()
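If the batching mentioned in the comment above is ever needed, one possible sketch, assuming the same sql, cursor and conn objects and a hypothetical batch size, is:

import csv
from itertools import islice

BATCH_SIZE = 500  # hypothetical value; tune as needed

with open("67359903.csv", "r", newline="") as f:
    reader = csv.reader(f, delimiter=";")
    next(reader)  # skip the header row
    while True:
        # Pull up to BATCH_SIZE rows at a time from the csv reader.
        batch = list(islice(reader, BATCH_SIZE))
        if not batch:
            break
        cursor.executemany(sql, batch)
        conn.commit()  # commit once per batch rather than once per row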
If using the csv module is not convenient, the dataframe's itertuples method may be used to iterate over the data:
empdata = pd.read_csv('67359903.csv', index_col=False, delimiter=';')
for tuple_ in empdata.itertuples(index=False):
    cursor.execute(sql, tuple_)
conn.commit()
Or the dataframe can be dumped to the database directly.
import sqlalchemy as sa
engine = sa.create_engine('mysql+mysqlconnector:///test')
empdata.to_sql('company', engine, index=False, if_exists='replace')

MySQL (python 3.6) - SELECT * FROM table - returns number of rows instead of table

I am working with MySQL but I have some unexpected behaviour.
I have past experience with SQLite but I guess I am missing something here.
Using the query SELECT * FROM tableName I would expect the content of the table to be the output.
Instead I get an int, being the count of rows in the table.
Here is the piece of code I am using.
import MySQLdb
conn=MySQLdb.connect(host="xxx",user="xxx",passwd="xxx")
cursor = conn.cursor()
cursor.execute("create database if not exists Test;")
cursor.execute("use Test;")
cursor.execute("create table if not exists City (id int not null primary key auto_increment, city varchar(50), unique(city));")
cursor.execute("insert into City (city) values ('Firenze');")
cursor.execute("insert into City (city) values ('Roma');")
conn.commit()
print(cursor.execute("select city from City;"))
I would expect to get:
Firenze
Roma
Instead I get:
2
If I run the same query from a SQL client I get the expected output. Any clever idea?
Thanks :)
You are missing the fetchall() call in your code.
fetchall() retrieves the result rows of the last executed statement.
import MySQLdb
conn=MySQLdb.connect(host="xxx",user="xxx",passwd="xxx")
cursor = conn.cursor()
cursor.execute("create database if not exists Test;")
cursor.execute("use Test;")
cursor.execute("create table if not exists City (id int not null primary key
auto_increment, city varchar(50), unique(city));")
cursor.execute("insert into City (city) values ('Firenze');")
cursor.execute("insert into City (city) values ('Roma');")
conn.commit()
print(cursor.execute("select city from City;"))
myresult = mycursor.fetchall()
for x in myresult:
print(x)
The thing is that print(cursor.execute("select city from City;")) prints the return value of execute(), which is the row count, not the rows themselves.
For the complete records, use something like this:
myresult = cursor.fetchall()
for x in myresult:
    print(x)

Importing data from an excel file using python into SQL Server

I have found some other questions that have a similar error to what I am getting, but have not been able to figure out how to resolve this based on the answers. I am trying to import an excel file into SQL Server with the help of python. This is the code I wrote:
import pandas as pd
import numpy as np
import pandas.io.sql
import pyodbc
import xlrd
server = "won't disclose private info"
db = 'private info'
conn = pyodbc.connect('DRIVER={SQL Server};SERVER=' + server + ';DATABASE=' + db + ';Trusted_Connection=yes')
cursor = conn.cursor()
book = xlrd.open_workbook("Daily Flash.xlsx")
sheet = book.sheet_by_name("Sheet1")
query1 = """CREATE TABLE [LEAF].[MK] (
    [LEAF][Lease_Number] varchar(255),
    [LEAF][Start_Date] varchar(255),
    [LEAF][Report_Status] varchar(255),
    [LEAF][Status_Date] varchar(255),
    [LEAF][Current_Status] varchar(255),
    [LEAF][Sales_Rep] varchar(255),
    [LEAF][Customer_Name] varchar(255),
    [LEAF][Total_Finance] varchar(255),
    [LEAF][Rate_Class] varchar(255),
    [LEAF][Supplier_Name] varchar(255),
    [LEAF][DecisionStatus] varchar(255))"""
query = """INSERT INTO [LEAF].[MK] (Lease_Number, Start_Date, Report_Status,
    Status_Date, Current_Status, Sales_Rep, Customer_Name, Total_Finance,
    Rate_Class, Supplier_Name, DecisionStatus)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
for r in range(1, sheet.nrows):
    Lease_Number = sheet.cell(r,0).value
    Start_Date = sheet.cell(r,1).value
    Report_Status = sheet.cell(r,2).value
    Status_Date = sheet.cell(r,3).value
    Current_Status = sheet.cell(r,4).value
    Sales_Rep = sheet.cell(r,5).value
    Customer_Name = sheet.cell(r,6).value
    Total_Financed = sheet.cell(r,7).value
    Rate_Class = sheet.cell(r,8).value
    Supplier_Name = sheet.cell(r,9).value
    DecisionStatus = sheet.cell(r,10).value
    values = (Lease_Number, Start_Date, Report_Status, Status_Date,
              Current_Status, Sales_Rep, Customer_Name, Total_Financed,
              Rate_Class, Supplier_Name, DecisionStatus)
    cursor.execute(query1)
    cursor.execute(query, values)
database.commit()
database.close()
database.commit()
The error message I get is:
ProgrammingError Traceback (most recent call last)
<ipython-input-24-c525ebf0af73> in <module>()
16
17 # Execute sql Query
---> 18 cursor.execute(query, values)
19
20 # Commit the transaction
ProgrammingError: ('The SQL contains 0 parameter markers, but 11 parameters were supplied', 'HY000')
Can someone please explain the problem to me and how I can fix it? Thank you!
Update:
I have gotten that error message to go away based on the comments below. I also modified my query because the table into which I am trying to insert values was not previously created, so I updated my code in an attempt to create it.
However, now I am getting the error message:
ProgrammingError: ('42000', '[42000] [Microsoft][ODBC SQL Server Driver][SQL
Server]The specified schema name "dbo" either does not exist or you do not
have permission to use it. (2760) (SQLExecDirectW)')
I tried changing that slightly by writing CREATE [HELLO][MK] instead of just CREATE MK but that tells me that MK is already in the database... What steps should I take next?
Based on the conversation we had in our chat, here are a few takeaways:
After executing your CREATE TABLE query, make sure to commit it immediately before running any subsequent INSERT queries.
Use error catching for cases when the table already exists in the database. You asked whether the script would still run if you wanted to import more data into the table. The answer is no, since Python will throw an exception at cursor.execute(query1).
If you want to validate whether your insert operations were successful, you can do a simple record count check.
EDIT
Yesterday, when I had #mkheifetz test my code out, he caught a minor bug where the validation check would return False, and the reason was because the database already had existing records, so when comparing against only the current data being imported, the validation would fail. Therefore, as a solution to address the bug, I have modified the code again.
Below is how I would modify your code:
import pandas as pd
import numpy as np
import seaborn as sns
import scipy.stats as stats
import matplotlib.pyplot as plt
import pandas.io.sql
import pyodbc
import xlrd
server = 'XXXXX'
db = 'XXXXXdb'
# create Connection and Cursor objects
conn = pyodbc.connect('DRIVER={SQL Server};SERVER=' + server + ';DATABASE=' + db + ';Trusted_Connection=yes')
cursor = conn.cursor()
# read data
data = pd.read_excel('Flash Daily Apps through 070918.xls')
# rename columns
data = data.rename(columns={'Lease Number': 'Lease_Number',
                            'Start Date': 'Start_Date',
                            'Report Status': 'Report_Status',
                            'Status Date': 'Status_Date',
                            'Current Status': 'Current_Status',
                            'Sales Rep': 'Sales_Rep',
                            'Customer Name': 'Customer_Name',
                            'Total Financed': 'Total_Financed',
                            'Rate Class': 'Rate_Class',
                            'Supplier Name': 'Supplier_Name'})
# export
data.to_excel('Daily Flash.xlsx', index=False)
# Open the workbook and define the worksheet
book = xlrd.open_workbook("Daily Flash.xlsx")
sheet = book.sheet_by_name("Sheet1")
query1 = """
CREATE TABLE [LEAF].[ZZZ] (
Lease_Number varchar(255),
Start_Date varchar(255),
Report_Status varchar(255),
Status_Date varchar(255),
Current_Status varchar(255),
Sales_Rep varchar(255),
Customer_Name varchar(255),
Total_Finance varchar(255),
Rate_Class varchar(255),
Supplier_Name varchar(255),
DecisionStatus varchar(255)
)"""
query = """
INSERT INTO [LEAF].[ZZZ] (
Lease_Number,
Start_Date,
Report_Status,
Status_Date,
Current_Status,
Sales_Rep,
Customer_Name,
Total_Finance,
Rate_Class,
Supplier_Name,
DecisionStatus
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"""
# execute create table
try:
    cursor.execute(query1)
    conn.commit()
except pyodbc.ProgrammingError:
    pass
# grab existing row count in the database for validation later
cursor.execute("SELECT count(*) FROM LEAF.ZZZ")
before_import = cursor.fetchone()
for r in range(1, sheet.nrows):
    Lease_Number = sheet.cell(r,0).value
    Start_Date = sheet.cell(r,1).value
    Report_Status = sheet.cell(r,2).value
    Status_Date = sheet.cell(r,3).value
    Current_Status = sheet.cell(r,4).value
    Sales_Rep = sheet.cell(r,5).value
    Customer_Name = sheet.cell(r,6).value
    Total_Financed = sheet.cell(r,7).value
    Rate_Class = sheet.cell(r,8).value
    Supplier_Name = sheet.cell(r,9).value
    DecisionStatus = sheet.cell(r,10).value
    # Assign values from each row
    values = (Lease_Number, Start_Date, Report_Status, Status_Date, Current_Status,
              Sales_Rep, Customer_Name, Total_Financed, Rate_Class, Supplier_Name,
              DecisionStatus)
    # Execute sql Query
    cursor.execute(query, values)
# Commit the transaction
conn.commit()
# If you want to check if all rows are imported
cursor.execute("SELECT count(*) FROM LEAF.ZZZ")
result = cursor.fetchone()
print((result[0] - before_import[0]) == len(data.index)) # should be True
# Close the database connection
conn.close()

Operational Error in SQL

I'm running sqlite3 and I cannot, for the life of me, work out what is going wrong in my code. Bear in mind that it doesn't crash when creating other tables that come before it in the database. Part of me suspects it may be a case of capitalisation somewhere in an SQL statement, but it was working normally a few days ago.
Create Table function
def create_table(db_name, table_name, sql):
    with sqlite3.connect(db_name) as db:
        cursor = db.cursor()
        cursor.execute("select name from sqlite_master where name=?", (table_name,))
        result = cursor.fetchall()
        keep_table = True
        if len(result) == 1:
            response = input("The table {0} already exists, do you wish to recreate (y/n): ".format(table_name)) #option to recreate the database
            if response == 'y':
                keep_table = False
                print("The {0} table will be recreated - all existing data will be lost".format(table_name))
                cursor.execute("drop table if exists {0}".format(table_name))
                db.commit()
            elif response == 'n':
                print("The existing table was kept")
            else:
                print("Incorrect input, please try again.") #validation measure
                if len(result) == 1:
                    response = input("The table {0} already exists, do you wish to recreate (y/n): ".format(table_name))
                    if response == 'y':
                        keep_table = False
                        print("The {0} table will be recreated - all existing data will be lost".format(table_name))
                        cursor.execute("drop table if exists {0}".format(table_name))
                        db.commit()
        else:
            keep_table = False
        if not keep_table:
            cursor.execute(sql)
            db.commit()
Tables that are causing the problem?
def create_customer_table():
    sql = """create table Customer
             (CustomerID integer,
              FirstName text,
              LastName text,
              Street text,
              Town text,
              Postcode text,
              TelephoneNumber text,
              EmailAddress text
              primary key(CustomerID))"""
    create_table(db_name, "Customer", sql)
*Ignore any indentation issues in the paste; it is indented properly in my program.
def create_customer_order_table():
    sql = """create table CustomerOrder
             (OrderID integer,
              CustomerID integer,
              Date date,
              Time integer
              primary key(OrderID)
              foreign key(CustomerID) references Customer(CustomerID))"""
    create_table(db_name, "CustomerOrder", sql)
Here is the error I receive:
Traceback (most recent call last):
File "/Users/X/Downloads/manage_table_March_2015.py", line 110, in <module>
create_customer_table()
File "/Users/X/Downloads/manage_table_March_2015.py", line 78, in create_customer_table
create_table(db_name, "Customer", sql)
File "/Users/X/Downloads/manage_table_March_2015.py", line 33, in create_table
cursor.execute(sql)
ysqlite3.OperationalError: near "(": syntax error

insert two values from an mysql table into another table using a python program

I'm having a small problem with a Python program (below) that I'm writing.
I want to insert two values from a MySQL table into another table from a Python program.
The two fields are priority and product and I have selected them from the shop table and I want to insert them into the products table.
Can anyone help? Thanks a lot. Marc.
import MySQLdb
def checkOut():
    db = MySQLdb.connect(host='localhost', user = 'root', passwd = '$$', db = 'fillmyfridge')
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    user_input = raw_input('please enter the product barcode that you are taking out of the fridge: \n')
    cursor.execute('update shops set instock=0, howmanytoorder = howmanytoorder + 1 where barcode = %s', (user_input))
    db.commit()
    cursor.execute('select product, priority from shop where barcode = %s', (user_input))
    rows = cursor.fetchall()
    cursor.execute('insert into products(product, barcode, priority) values (%s, %s)', (rows["product"], user_input, rows["priority"]))
    db.commit()
    print 'the following product has been removed from the fridge and needs to be ordered'
You don't mention what the problem is, but in the code you show this:
cursor.execute('insert into products(product, barcode, priority) values (%s, %s)', (rows["product"], user_input, rows["priority"]))
where your values clause only has two %s's in it, where it should have three:
cursor.execute('insert into products(product, barcode, priority) values (%s, %s, %s)', (rows["product"], user_input, rows["priority"]))
Well, the same thing again:
import MySQLdb
def checkOut():
    db = MySQLdb.connect(host='localhost', user = 'root', passwd = '$$', db = 'fillmyfridge')
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    user_input = raw_input('please enter the product barcode that you are taking out of the fridge: \n')
    cursor.execute('update shops set instock=0, howmanytoorder = howmanytoorder + 1 where barcode = %s', (user_input))
    db.commit()
    cursor.execute('select product, priority from shop where barcode = %s', (user_input))
    rows = cursor.fetchall()
Do you need fetchall()? Barcodes are unique, I guess, and one barcode maps to one product, I guess. So fetchone() is enough... isn't it?
In any case, if you do a fetchall() it's a result set, not a single result.
So rows["product"] is not valid.
It has to be
for row in rows:
    cursor.execute('insert into products(product, barcode, priority) values (%s, %s, %s)', (row["product"], user_input, row["priority"]))
    db.commit()
    print 'the following product has been removed from the fridge and needs to be ordered'
or better
import MySQLdb
def checkOut():
    db = MySQLdb.connect(host='localhost', user = 'root', passwd = '$$', db = 'fillmyfridge')
    cursor = db.cursor(MySQLdb.cursors.DictCursor)
    user_input = raw_input('please enter the product barcode that you are taking out of the fridge: \n')
    cursor.execute('update shops set instock=0, howmanytoorder = howmanytoorder + 1 where barcode = %s', (user_input))
    cursor.execute('insert into products(product, barcode, priority) select product, barcode, priority from shop where barcode = %s', (user_input))
    db.commit()
Edit: Also, you use db.commit() almost like print, sprinkled everywhere. You need to read up on and understand the atomicity principle for databases.
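As a rough sketch of that principle (reusing the db connection, cursor and user_input from the code above), the update and the insert can be committed as one unit of work so that either both take effect or neither does:

try:
    cursor.execute('update shops set instock=0, howmanytoorder = howmanytoorder + 1 where barcode = %s', (user_input,))
    cursor.execute('insert into products(product, barcode, priority) select product, barcode, priority from shop where barcode = %s', (user_input,))
    db.commit()   # one commit for the whole unit of work
except MySQLdb.Error:
    db.rollback() # if either statement fails, undo both
    raise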
