Python and PostGres | Showing data in a QLabel (psycopg2) - python

im using Postgres together with python(psycopg2). Im trying to insert data in a QLabel. It shows the data, but the data comes with clinges. How do I get rid of the clinges?
My code:
def getstunden():
conn = None
try:
conn = psycopg2.connect("dbname=test user=postgres password=test")
cur = conn.cursor()
cur.execute("SELECT stunden FROM ueberstunden WHERE name = 'test'")
row = cur.fetchone()
while row is not None:
self.s_test.setText(str(row))
row = cur.fetchone()
cur.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close()
This is what I get out of it:
I want it to just show 12

Per here Cursor:
Note
cursor objects are iterable, so, instead of calling explicitly fetchone() in a loop, the object itself can be used:
cur.execute("SELECT * FROM test;")
for record in cur:
print record
(1, 100, "abc'def")
(2, None, 'dada')
(3, 42, 'bar')
So to simplify:
def getstunden():
conn = None
try:
conn = psycopg2.connect("dbname=test user=postgres password=test")
cur = conn.cursor()
cur.execute("SELECT stunden FROM ueberstunden WHERE name = 'test'")
for row in cur:
self.s_test.setText(str(row[0]))
cur.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close()

It's just like this
print(row[0])
in your code
def getstunden():
conn = None
try:
conn = psycopg2.connect("dbname=test user=postgres password=test")
cur = conn.cursor()
cur.execute("SELECT stunden FROM ueberstunden WHERE name = 'test'")
row = cur.fetchone()
while row is not None:
#row without the brackets
self.s_test.setText(str(row[0]))
row = cur.fetchone()
cur.close()
except (Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close()

Related

Integer out of range when inserting large number of rows to postgress

I have tried multiple solutions and way around to solve this issue, probably something is still I am missing.
I want to insert a list of values to my database. Here is what I am doing -
import psycopg2
import pandas as pd
Region = [
"Region1",
"Region2",
]
qa = "endpoint1"
def insert_many(data_list):
"""Add data to the table."""
insert_query = """INSERT INTO pbi_forecast(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u)
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
conn = None
try:
conn = psycopg2.connect(
database='db',
user='user',
host='host1',
port=5432,
password=pw
)
cur = conn.cursor()
cur.executemany(insert_query, data_list)
conn.commit()
cur.close()
except(Exception, psycopg2.DatabaseError) as error:
print(error)
finally:
if conn is not None:
conn.close()
# connect prod2
conn_prod2 = psycopg2.connect(
database='db',
user='user',
host='host2',
port=5432,
password=pw
)
cur_prod2 = conn_prod2.cursor()
for re in region:
sql_prod2_select = f"select * from pbi_forecast where \
run_date >= '2022-04-20 00:00:00'\
and run_date <= '2022-04-22 00:00:00'\
and region = '{re}' ;"
cur_prod2.execute(sql_prod2_select)
forecast = pd.DataFrame(cur_prod2.fetchall())
data_list = [list(row) for row in forecast.itertuples(index=False)]
insert_many(data_list)
I am getting integer out of range error when running it. If I restrict the insert record to somewhere 50 records it works but when running it without any limit it throws this error.
Thanks in advance.

Sqlite 3: Error opening the databaseIncorrect number of bindings supplied. The current statement uses 1, and there are 4 supplied

I've already tried adding in a comma after Name and the question mark in "VALUES" and was getting a syntax error for my parthenthesis.
#app.route("/Disease/new", methods = ["POST"])
def addDisease():
newDisease = {}
conn = None
try:
jsonPostData = request.get_json()
Name = jsonPostData["Name"]
conn = sqlite3.connect("./dbs/ContactTracer.db")
conn.row_factory = sqlite3.Row
sql = """
INSERT INTO Disease(Name) VALUES(?)
"""
cursor = conn.cursor()
cursor.execute(sql, (Name))
conn.commit()
sql = """
SELECT Disease.ID, Disease.Name
From Disease
Where Disease.ID = ?
"""
cursor.execute(sql,(cursor.lastrowid,))
row = cursor.fetchone()
newDisease["ID"] = row["ID"]
newDisease["Name"] = row["Name"]
except Error as e:
print(f"Error opening the database{e}")
abort(500)
finally:
if conn:
conn.close()
return newDisease
Remove the () and check if INSERT succeeded
cursor.execute(sql, Name)
...
if cursor.lastrowid:
cursor.execute(sql, cursor.lastrowid)

Retrieving data returns none

I am trying to retrieve all rows in mysql.
import mysql.connector,sys
user,pw, host,db = 'root','12345','127.0.0.1','CA2Database'
cnx = mysql.connector.connect(user=user, password=pw, host=host, database=db)
cursor = cnx.cursor()
try:
print(cursor.execute('SELECT * FROM student'))
except mysql.connector.Error as err:
print(err)
print("Error Code:", err.errno)
print("SQLSTATE", err.sqlstate)
print("Message", err.msg)
finally:
print()
The output is None . There is data in the table.
https://imgur.com/a/APUuZot
Probably you were missing some necessary function calls. This should do:
user,pw, host,db = 'root','12345','127.0.0.1','CA2Database'
cnx = mysql.connector.connect(user=user, password=pw, host=host, database=db)
cursor = cnx.cursor()
sql_query = 'SELECT * FROM student;'
cursor.execute(sql_query)
data = cursor.fetchall()
cnx.commit()
cursor.close()

Correct approach to test drop table sqlite in unittest

I'm writing unit tests to test my environment.
I have created tests such as:
def test_database_file_present_and_readable(self):
self.assertTrue(os.access(path_db_file, os.R_OK))
def test_connect_to_db(self):
conn = sqlite3.connect(path_db_file)
conn.close()
def test_create_table(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
cur.execute("CREATE TABLE test_table (id integer PRIMARY KEY, name text)")
conn.commit()
conn.close()
def test_insert_into_table(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
cur.execute("insert into test_table (name) values (?)", ["Test value"])
conn.commit()
conn.close()
def test_update_table(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
cur.execute("update test_table set id = 2 where id = ?", [1])
conn.commit()
conn.close()
def test_delete_from_table(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
cur.execute("delete from test_table where id = ?", [2])
conn.commit()
conn.close()
def test_if_test_table_is_empty(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
result = cur.execute("select exists(select 1 from test_table)").fetchall()
conn.commit()
conn.close()
self.assertTrue(result == 1)
def test_delete_table(self):
conn = sqlite3.connect(path_db_file)
cur = conn.cursor()
cur.execute("drop table test_table")
conn.commit()
conn.close()
And during program execution order of tests is unknown - how to set the order or how to clean up database after creating tests with table creation?
You can get pointers about test method execution order here: Python unittest.TestCase execution order
One suggestion - if you are going for such testing, it's better to mock external dependencies like sqlite & test only the code you've written.

How to use "INSERT" in psycopg2 connection pooling?

I use psycopg2 to connect to PostgreSQL on Python and I want to use connection pooling.
I don't know what should I do instead commit() and rollback() when I execute INSERT query.
db = pool.SimpleConnectionPool(1, 10,host=conf_hostname,database=conf_dbname,user=conf_dbuser,password=conf_dbpass,port=conf_dbport)
# Get Cursor
#contextmanager
def get_cursor():
con = db.getconn()
try:
yield con.cursor()
finally:
db.putconn(con)
with get_cursor() as cursor:
cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
id = cursor.fetchone()
I don't get id of inserted record without commit().
UPDATE I can not test the code but I give you some ideas:
You do the commit in connection not in db
# Get Cursor
#contextmanager
def get_cursor():
con = db.getconn()
try:
yield con
finally:
db.putconn(con)
with get_cursor() as cursor:
con.cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
con.commit()
id = cursor.fetchone()
or
# Get Cursor
#contextmanager
def get_cursor():
con = db.getconn()
try:
yield con.cursor()
con.commit()
finally:
db.putconn(con)
with get_cursor() as cursor:
con.cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
id = cursor.fetchone()
Connection pooling exist because creating a new connection to a db can be expensive and not to avoid commits or rollbacks. So you can commit your data without any issue, committing data will not destroy the connection.
here is my working example:
db = pool.SimpleConnectionPool(1, 10,host=conf_hostname,database=conf_dbname,user=conf_dbuser,password=conf_dbpass,port=conf_dbport)
#contextmanager
def get_connection():
con = db.getconn()
try:
yield con
finally:
db.putconn(con)
def write_to_db():
with get_connection() as conn:
try:
cursor = conn.cursor()
cursor.execute("INSERT INTO table (fields) VALUES (values) RETURNING id")
id = cursor.fetchone()
cursor.close()
conn.commit()
except:
conn.rollback()
I think this will be a little more pythonic:
db_pool = pool.SimpleConnectionPool(1, 10,
host=CONF.db_host,
database=CONF.db_name,
user=CONF.db_user,
password=CONF.db_user,
port=CONF.db_port)
#contextmanager
def db():
con = db_pool.getconn()
cur = con.cursor()
try:
yield con, cur
finally:
cur.close()
db_pool.putconn(con)
if __name__ == '__main__':
with db() as (connection, cursor):
try:
cursor.execute("""INSERT INTO table (fields)
VALUES (values) RETURNING id""")
my_id = cursor.fetchone()
rowcount = cursor.rowcount
if rowcount == 1:
connection.commit()
else:
connection.rollback()
except psycopg2.Error as error:
print('Database error:', error)
except Exception as ex:
print('General error:', ex)

Categories