Can I get a dictionary from pymysql?

I have the following question: I would like to get the contents of a table in dictionary format. My code:
from configparser import ConfigParser
import pymysql as mysql


class Database:
    def __init__(self):
        config = ConfigParser()
        config.read("config.ini")
        user = config["db"]["user"]
        passwd = config["db"]["password"]
        host = config["db"]["host"]
        db = config["db"]["database"]

        self.connection = mysql.connect(
            user=user,
            passwd=passwd,
            host=host,
            db=db,
        )
        self.cursor = self.connection.cursor()

    def init_dict(self):
        query = """
            SELECT a, b
            FROM mytable
        """
        self.cursor.execute(query)
        data = self.cursor.fetchall()
        return dict(data)
Is there a more Pythonic way to get the data in the init_dict method as a dictionary? I found that it should be possible to return a dict directly from the cursor, but I don't know how. Thanks a lot.
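If "a dict directly from the cursor" means one dict per row keyed by column name, a minimal sketch using pymysql's DictCursor (passed via the cursorclass argument) could look like the following; user, passwd, host and db are assumed to be the same values read from config.ini in the question. Note that DictCursor returns one dict per row, so building the single {a: b} mapping still takes a comprehension.

import pymysql
import pymysql.cursors

# Sketch: ask the connection for DictCursor cursors so every row comes back
# as {"column_name": value, ...}.
connection = pymysql.connect(
    user=user,
    passwd=passwd,
    host=host,
    db=db,
    cursorclass=pymysql.cursors.DictCursor,
)

cursor = connection.cursor()
cursor.execute("SELECT a, b FROM mytable")
rows = cursor.fetchall()                        # e.g. [{"a": 1, "b": "x"}, ...]
a_to_b = {row["a"]: row["b"] for row in rows}   # same mapping as dict(data) above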

Related

Elements from database not deleted

I am currently creating a program using SQLite3. While entries can be added to this database, I cannot seem to delete them.
import database

db_actions = database.DatabaseActions()


def set_delete_student_parameters():
    del_student_number = delete_number_enter
    DeleteStudent(del_student_number)


def DeleteStudent(del_student_number):
    db_actions.delete_student(del_student_number)

import sqlite3

my_conn = sqlite3.connect('general_db.db')
print("Connected to database successfully")


class DatabaseActions:
    def __init__(self):
        self.db = sqlite3.connect('general_db.db')  # Open database connection here
        self.setup_tables()

    def delete_student(self, student_number):
        cursor = self.db.cursor()
        sql = "DELETE FROM Students WHERE StudentNumber=?"
        cursor.execute(sql, [str(student_number)])
        self.db.commit()
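Not part of the question, but a small diagnostic sketch that often narrows this kind of problem down: sqlite3 exposes cursor.rowcount after a DELETE, and 0 means the WHERE clause matched nothing (for example because StudentNumber was stored as an INTEGER while the parameter was passed as a string). The student_number value here is hypothetical.

import sqlite3

student_number = 1001  # hypothetical value to test with

db = sqlite3.connect('general_db.db')
cursor = db.cursor()

# rowcount after a DELETE tells us how many rows actually matched.
cursor.execute("DELETE FROM Students WHERE StudentNumber = ?", (str(student_number),))
print("rows deleted with string key:", cursor.rowcount)

if cursor.rowcount == 0:
    # Retry with an integer key in case the column was populated with INTEGERs.
    cursor.execute("DELETE FROM Students WHERE StudentNumber = ?", (int(student_number),))
    print("rows deleted with integer key:", cursor.rowcount)

db.commit()  # without the commit the deletion is lost when the connection closes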

perform upsert operation on postgres like pandas to_sql function using python

Before asking this question, I read many links about the UPSERT operation on Postgres:
PostgreSQL Upsert Using INSERT ON CONFLICT statement
Anyway to Upsert database using PostgreSQL in Python
But this question is different from those, since the desired functionality is different. What I want is to implement something like the pandas to_sql function, which has the following features:
Automatically creates the table
Keeps the data types of each column
The only drawback of to_sql is that it doesn't perform an UPSERT operation on Postgres. Is there any way to implement the expected functionality (automatically create the table based on the columns, perform the UPSERT operation, and keep the data types) by passing a dataframe to it?
Previously implemented code using the Pandas to_sql function:
class PostgreSQL:
    def __init__(self):
        postgres_config = config_dict[Consts.POSTGRES.value]
        self.host = postgres_config[Consts.HOST.value]
        self.port = postgres_config[Consts.PORT.value]
        self.db_name = postgres_config[Consts.DB_NAME.value]
        self.username = postgres_config[Consts.USERNAME.value]
        self.password = postgres_config[Consts.PASSWORD.value]

    def get_connection(self) -> object:
        url_schema = Consts.POSTGRES_URL_SCHEMA.value.format(
            self.username, self.password, self.host, self.port, self.db_name
        )
        try:
            engine = create_engine(url_schema)
            return engine
        except Exception as e:
            logger.error('Make sure you have provided correct credentials for the DB connection.')
            raise e

    def save_df_to_db(self, df: object, table_name: str) -> None:
        df.to_sql(table_name, con=self.get_connection(), if_exists='append')
I have written a very generic piece of code that performs an UPSERT, which is not officially supported (as of December 2021), using a Pandas dataframe, in an efficient way.
The following code updates existing rows on their primary key; otherwise it creates the table (in case the table name doesn't exist) and adds the new records to it.
Code:
import os

import numpy as np
import pandas as pd
from sqlalchemy import create_engine, Table
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.ext.automap import automap_base


class PostgreSQL:
    def __init__(self):
        postgres_config = config_dict[Consts.POSTGRES.value]
        self.host = postgres_config[Consts.HOST.value]
        self.port = postgres_config[Consts.PORT.value]
        self.db_name = postgres_config[Consts.DB_NAME.value]
        self.username = postgres_config[Consts.USERNAME.value]
        self.password = postgres_config[Consts.PASSWORD.value]

    def get_connection(self) -> object:
        url_schema = 'postgresql://{}:{}@{}:{}/{}'.format(
            self.username, self.password, self.host, self.port, self.db_name
        )
        try:
            engine = create_engine(url_schema)
            return engine
        except Exception as e:
            logger.error('Make sure you have provided correct credentials for the DB connection.')
            raise e

    def run_query(self, query: str) -> list:
        engine = self.get_connection()
        return engine.execute(query).fetchall()

    def save_df_to_db(self, df: object, table_name: str) -> None:
        root_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')
        engine = self.get_connection()
        add_primary_key_query = get_query(root_dir, Directories.COMMON.value, 'add_primary_key.sql', table_name)
        table_existence_query = get_query(root_dir, Directories.COMMON.value, 'table_existence.sql', table_name)

        if not engine.execute(table_existence_query).first()[0]:  # if the table does not exist
            logger.info('Create table automatically and from scratch!')
            df.to_sql(table_name, con=self.get_connection(), if_exists='append')
            engine.execute(add_primary_key_query)
        else:
            try:
                df = df.replace("NaT", None)
                df = df.replace(pd.NaT, None)
                df = df.replace({pd.NaT: None})
                df_dict = df.to_dict('records')
            except AttributeError as e:
                logger.error('Empty Dataframe!')
                raise e

            with engine.connect() as connection:
                logger.info('Table already exists!')
                base = automap_base()
                base.prepare(engine, reflect=True)
                target_table = Table(table_name, base.metadata,
                                     autoload=True, autoload_with=engine)

                chunks = [df_dict[i:i + 1000] for i in range(0, len(df_dict), 1000)]
                for chunk in chunks:
                    stmt = insert(target_table).values(chunk)
                    update_dict = {c.name: c for c in stmt.excluded if not c.primary_key}
                    connection.execute(stmt.on_conflict_do_update(
                        constraint=f'{table_name}_pkey',
                        set_=update_dict,
                    ))

        logger.info('Saving data is successfully done.')
Table existence query:
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = '{}'
);
Add primary key query:
ALTER TABLE {} ADD PRIMARY KEY (id);
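For reference, a short usage sketch under the same assumptions as the code above (config_dict, Consts and the two .sql files are project-specific and not shown); the dataframe and table name here are made up, and "id" is the column the add_primary_key.sql query promotes to the primary key so ON CONFLICT can match existing rows:

import pandas as pd

# Hypothetical example data.
df = pd.DataFrame([
    {"id": 1, "name": "alice", "score": 10},
    {"id": 2, "name": "bob", "score": 7},
])

pg = PostgreSQL()
pg.save_df_to_db(df, table_name="scores")                        # first run: table is created and keyed on "id"
pg.save_df_to_db(df.assign(score=[11, 8]), table_name="scores")  # later runs: rows are upserted on "id"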

Use multiple functions in MySQLdb (python)

The code below runs just fine, but I want to separate this code into functions (this is my first time using MySQLdb).
import MySQLdb

# Open database connection
db = MySQLdb.connect("localhost", "user_name", "pass", "db_name")
cursor = db.cursor()

sql = "SELECT activity_log.datetime FROM activity_log"
cursor.execute(sql)
date_data = cursor.fetchall()
for content in date_data:
    print content

# disconnect from server
db.close()
Question: how can I create one database connection and use it across multiple functions? This is what I've written so far (it doesn't work):
import MySQLdb

class DB():
    def __init__(self):
        db = MySQLdb.connect("locahost", "user_name", "pass", "db_name")
        self.cur = db.cursor()

    def time_statistic(self):
        sql = "SELECT activity_log.datetime FROM activity_log"
        self.cur.execute(sql)
        self.date_data = self.cursor.fetchone()
        for content in self.date_data:
            print content

    def test1(self):
        pass

if __name__ == '__main__':
    db = DB.connect("db_name")
    db.time_statistic(self)
    db.test1(self)
    db.close()
You need to pass the arguments to __init__ to make sure that the class connects to the right DB.
import MySQLdb

class DB():
    def __init__(self, server, user, password, db_name):
        db = MySQLdb.connect(server, user, password, db_name)
        self.cur = db.cursor()

    def time_statistic(self):
        sql = "SELECT activity_log.datetime FROM activity_log"
        self.cur.execute(sql)
        self.date_data = self.cur.fetchone()
        for content in self.date_data:
            print content

    def test1(self):
        pass

if __name__ == '__main__':
    db = DB(<server>, <user>, <password>, <db_name>)
    db.time_statistic()
    db.test1()
Replace the arguments in <> with actual values you need to connect to the db. You may also want to add some error handling in the above code.
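If the db.close() call from the question's __main__ block is also wanted, one possible variant (my sketch, not part of the answer above) keeps the connection on the instance as well so it can be closed or committed later:

import MySQLdb

class DB():
    def __init__(self, server, user, password, db_name):
        # Keep both the connection and the cursor on the instance.
        self.conn = MySQLdb.connect(server, user, password, db_name)
        self.cur = self.conn.cursor()

    def time_statistic(self):
        self.cur.execute("SELECT activity_log.datetime FROM activity_log")
        for content in self.cur.fetchall():
            print content

    def close(self):
        self.cur.close()
        self.conn.close()

if __name__ == '__main__':
    db = DB("localhost", "user_name", "pass", "db_name")
    db.time_statistic()
    db.close()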

Unable to execute Postgres queries in Python

I'm trying to create a Postgres table using psycopg2 in Python as follows:
import psycopg2


class DbOperations(object):
    def __init__(self):
        self.dummy = None
        self.conn = None
        self.cur = None
        self.query = None
        self.db_name = "alarm_log"
        self.table_name = "alarms"
        self.user = "cayman"
        self.password = "admin"
        self.host = "127.0.0.1"

    def db_connect(self):
        self.conn = psycopg2.connect(dbname=self.db_name, user=self.user, password=self.password, host=self.host)
        self.cur = self.conn.cursor()

    def db_disconnect(self):
        self.conn.close()

    def db_create_table(self):
        self.query = """
            CREATE TABLE COMPANY(
                ID      INT  PRIMARY KEY NOT NULL,
                NAME    TEXT NOT NULL,
                AGE     INT  NOT NULL,
                ADDRESS CHAR(50),
                SALARY  REAL
            );
        """
        print(self.query)
        self.cur.execute(self.query)
Then I construct the object as follows:
db_app = DbOperations()
db_app.db_connect()
db_app.db_create_table()
I am able to manually connect to the database and create the table. However, I'm not able to do so using Python.
There are no exceptions or error messages. When I try to list the tables in the database manually, I don't find my newly created table.
Any suggestions on what could be wrong?
It seems you are missing the commit at the end of the db_create_table method:
self.conn.commit()
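Applied to the class in the question, this would look roughly like the following sketch; only the final commit line is new:

def db_create_table(self):
    self.query = """
        CREATE TABLE COMPANY(
            ID      INT  PRIMARY KEY NOT NULL,
            NAME    TEXT NOT NULL,
            AGE     INT  NOT NULL,
            ADDRESS CHAR(50),
            SALARY  REAL
        );
    """
    print(self.query)
    self.cur.execute(self.query)
    self.conn.commit()  # persist the CREATE TABLE so it is visible in other sessions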
Iron Fist's answer is absolutely correct, but if you don't want to have commits all over your code, you can also set it on the connection like this:
def db_connect(self):
    self.conn = psycopg2.connect(dbname=self.db_name, user=self.user, password=self.password, host=self.host)
    self.conn.autocommit = True
    self.cur = self.conn.cursor()

Python 2.7 connection string to PostgreSQL (OOP method)

I'm new to Python and I'm trying to make this work. I'm using Python 2.7 and PostgreSQL 9.3:
#! F:\Python2.7.6\python

import psycopg2

class Database:
    host = "192.168.56.101"
    user = "testuser"
    passwd = "passwd"
    db = "test"

    def __init__(self):
        self.connection = psycopg2.connect(host=self.host,
                                           user=self.user,
                                           password=self.passwd,
                                           dbname=self.db)
        self.cursor = self.connection.cursor

    def query(self, q):
        cursor = self.cursor
        cursor.execute(q)
        return cursor.fetchall()

    def __del__(self):
        self.connection.close()


if __name__ == "__main__":
    db = Database()
    q = "DELETE FROM testschema.test"
    db.query(q)
However, I am getting the error "AttributeError: 'builtin_function_or_method' object has no attribute 'execute'". I figure I should put something like self.execute = something in the Database class, but I can't figure out what exactly I need to put there. Any suggestions?
You are missing the parentheses at the end:
self.cursor = self.connection.cursor()
or
cursor = self.cursor()
But not both
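A sketch of the first option applied to the question's class; the commit call and the description check are extra assumptions on my part (without the commit, the DELETE in __main__ is rolled back when the connection closes, and fetchall() only works for statements that return rows):

import psycopg2

class Database:
    host = "192.168.56.101"
    user = "testuser"
    passwd = "passwd"
    db = "test"

    def __init__(self):
        self.connection = psycopg2.connect(host=self.host,
                                           user=self.user,
                                           password=self.passwd,
                                           dbname=self.db)
        self.cursor = self.connection.cursor()   # call the factory once, keep the cursor object

    def query(self, q):
        self.cursor.execute(q)
        self.connection.commit()                 # assumption: persist DELETE/UPDATE statements
        if self.cursor.description is not None:  # only statements that return rows can be fetched
            return self.cursor.fetchall()

    def __del__(self):
        self.connection.close()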
