drop all databases at the same time - python

I want to drop all of my databases at the same time. I have written this function so far, but unfortunately it doesn't work and gives me no error at all.
import mysql.connector

def drop_database():
    print('CONNECTING ...')
    mydb = mysql.connector.connect(
        host="xxx",
        user="xxx",
        password="xxx",
        port='xxx'
    )
    print('CONNECTED')
    # database_list = []
    drop_command = 'drop database'
    show_databases = 'SHOW DATABASES;'
    my_cursor = mydb.cursor(buffered=True)
    my_cursor.execute(show_databases)
    for show_databases in my_cursor:
        my_cursor.execute(drop_command, str(show_databases[0]), ';')
    for show_databases in my_cursor:
        print(show_databases[0])
    mydb.commit()
    mydb.close()

drop_database()
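For reference, a sketch of an approach that should work (host, user, password and port stay as the placeholders from the question): cursor.execute() does not glue its arguments into one statement, its second argument is a parameter sequence, and identifiers such as database names cannot be bound as parameters anyway. Fetching all the names first and then issuing one DROP DATABASE per name, skipping the system schemas, avoids both problems.

import mysql.connector

SYSTEM_SCHEMAS = {'mysql', 'information_schema', 'performance_schema', 'sys'}

def drop_all_databases():
    mydb = mysql.connector.connect(
        host="xxx",
        user="xxx",
        password="xxx",
        port='xxx'
    )
    my_cursor = mydb.cursor()
    my_cursor.execute('SHOW DATABASES')
    # read the full result set before executing anything else on this cursor
    databases = [row[0] for row in my_cursor.fetchall()]
    for name in databases:
        if name in SYSTEM_SCHEMAS:
            continue
        # identifiers cannot be bound as parameters, so build the statement directly
        my_cursor.execute('DROP DATABASE `{}`'.format(name))
        print('Dropped', name)
    my_cursor.close()
    mydb.close()

drop_all_databases()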

Python: No of rows are always 9 and does not return affected rows count after UPDATE query

This is nothing complicated, but I am not sure why it is not working.
import mysql.connector

def get_connection(host, user, password, db_name):
    connection = None
    try:
        connection = mysql.connector.connect(
            host=host,
            user=user,
            use_unicode=True,
            password=password,
            database=db_name
        )
        connection.set_charset_collation('utf8')
        print('Connected')
    except Exception as ex:
        print(str(ex))
    finally:
        return connection

with connection.cursor() as cursor:
    sql = 'UPDATE {} set underlying_price=9'.format(table_name)
    cursor.execute(sql)
    connection.commit()
    print('No of Rows Updated ...', cursor.rowcount)
It always returns 0 no matter what, while the same query shows the correct count in TablePlus.
The MySQL API provides a method for this, but I do not know how to call it; calling it against the connection variable gives an error.
I am not sure why your code does not work, but I am using pymysql and it works:
import os
import pandas as pd
from types import SimpleNamespace
from sqlalchemy import create_engine
import pymysql

PARAM = SimpleNamespace()
PARAM.DB_user = 'yourname'
PARAM.DB_password = 'yourpassword'
PARAM.DB_name = 'world'
PARAM.DB_ip = 'localhost'

def get_DB_engine_con(PARAM):
    DB_name = PARAM.DB_name
    DB_ip = PARAM.DB_ip
    DB_user = PARAM.DB_user
    DB_password = PARAM.DB_password
    ## engine = create_engine("mysql+pymysql://{user}:{pw}@{ip}/{db}".format(user=DB_user, pw=DB_password, db=DB_name, ip=DB_ip))
    conn = pymysql.connect(host=DB_ip, user=DB_user, passwd=DB_password, db=DB_name)
    cur = conn.cursor()
    return cur, conn  ## , engine

cur, conn = get_DB_engine_con(PARAM)
And with my data, if I run the code
table_name = 'ct2'
sql = "UPDATE {} set CountryCode='NL' ".format(table_name)
cur.execute(sql)
conn.commit()
print('No of Rows Updated ...', cur.rowcount)
the result No of Rows Updated ... 10 is printed, and the NLD values are changed to NL.
If using mysql.connector:
import mysql.connector

connection = mysql.connector.connect(
    host=PARAM.DB_ip,
    user=PARAM.DB_user,
    use_unicode=True,
    password=PARAM.DB_password,
    database=PARAM.DB_name
)
cur = connection.cursor()
table_name = 'ct2'
sql = "UPDATE {} set CountryCode='NL2' ".format(table_name)
cur.execute(sql)
print('No of Rows Updated ...', cur.rowcount)
connection.commit()
it still works: the country code is updated to NL2 and No of Rows Updated ... 10 is printed. The second time I run it, No of Rows Updated ... 0 is printed.
Not sure why it does not work on your machine.
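One likely explanation, offered here as an assumption rather than something the poster confirmed: after an UPDATE, MySQL reports the number of rows actually changed, not the number matched, which is exactly why the second run above prints 0. If the column already holds the target value, cursor.rowcount is 0 even though the WHERE clause matched rows. mysql.connector can report matched rows instead by setting the FOUND_ROWS client flag; a sketch with placeholder credentials:

import mysql.connector
from mysql.connector.constants import ClientFlag

connection = mysql.connector.connect(
    host='localhost',
    user='yourname',
    password='yourpassword',
    database='world',
    client_flags=[ClientFlag.FOUND_ROWS],  # rowcount = rows matched, not rows changed
)
cur = connection.cursor()
cur.execute("UPDATE ct2 SET CountryCode = 'NL2'")
print('No of Rows Matched ...', cur.rowcount)  # non-zero even if the values were already 'NL2'
connection.commit()
connection.close()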

Python: MySQL is not updating record despite of using Commit

I am facing a weird issue with the following code. The INSERTs go through fine, but the UPDATE query does not work at all. cursor.rowcount even shows 1, yet when I check in TablePlus the change is not reflected.
When I run the query UPDATE shop_links set product_status = 3 where shop_url = 'https://example.com' directly in TablePlus, it does affect the record.
The irony is that the update query which sets the status to 1 works just fine and updates instantly.
import mysql.connector

def get_connection(host, user, password, db_name):
    connection = None
    try:
        # connection = pymysql.connect(host=host,
        #                              user=user,
        #                              password=password,
        #                              db=db_name,
        #                              charset='utf8',
        #                              max_allowed_packet=1073741824,
        #                              cursorclass=pymysql.cursors.DictCursor)
        connection = mysql.connector.connect(
            host=host,
            user=user,
            use_unicode=True,
            password=password,
            database=db_name
        )
        connection.set_charset_collation('utf8')
        print('Connected')
    except Exception as ex:
        print(str(ex))
    finally:
        return connection

with connection.cursor() as cursor:
    sql = 'INSERT INTO {} (shop_url,product_url) VALUES (%s, %s)'.format(TABLE_FETCH_PRODUCTS)
    cursor.executemany(sql, records)
    connection.commit()

with connection.cursor() as cursor:
    # Update the shop URL
    # sql = "UPDATE {} set product_status = 3 where shop_url = '{}' ".format(TABLE_FETCH, shop_url)
    sql = "UPDATE {} set product_status = 3 where shop_url = %s ".format(TABLE_FETCH, shop_url)
    print(sql)
    print('----------------------------------------------------------------')
    cursor.execute(sql, (shop_url,))
    connection.commit()
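No answer is shown for this one, but a debugging sketch that may help narrow it down (TABLE_FETCH, shop_url and connection are the names from the question): check cursor.rowcount right after the UPDATE, and compare the stored shop_url byte for byte with the value being passed in, since a trailing slash or stray whitespace makes the parameterized WHERE clause match nothing.

with connection.cursor() as cursor:
    cursor.execute(
        "UPDATE {} SET product_status = 3 WHERE shop_url = %s".format(TABLE_FETCH),
        (shop_url,),
    )
    # 0 means no row was changed (either nothing matched or the value was already 3)
    print('rows changed:', cursor.rowcount)
    connection.commit()

with connection.cursor() as cursor:
    # inspect what is actually stored for URLs that look similar
    cursor.execute(
        "SELECT shop_url, product_status FROM {} WHERE shop_url LIKE %s".format(TABLE_FETCH),
        ('%' + shop_url + '%',),
    )
    for row in cursor.fetchall():
        print(repr(row))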

Insert Data in mysql using python

import mysql.connector

mydb = mysql.connector.connect(
    host="10.0.72.17",
    user="admin",
    passwd="1qaz!QAZ",
    database="test"
)
mycursor = mydb.cursor()
sql = "INSERT INTO biage(kompaniis_saxeli) VALUES (%s)"
val = ('bane')
mycursor.execute(sql, val)
mycursor = mydb.cursor()
mydb.commit()
This is my Python code, and I created the column kompaniis_saxeli varchar(225), but when I try to run this code I get the error:
raise ValueError("Could not process parameters")
ValueError: Could not process parameters
The Python driver needs the values as a sequence (a tuple or a list). ('bane') is just the string 'bane', because a one-element tuple requires a trailing comma.
So use:
import mysql.connector

mydb = mysql.connector.connect(
    host="10.0.72.17",
    user="admin",
    passwd="1qaz!QAZ",
    database="test"
)
mycursor = mydb.cursor()
sql = "INSERT INTO biage(kompaniis_saxeli) VALUES (%s)"
val = ('bane',)
mycursor.execute(sql, val)
mydb.commit()
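As an aside (not part of the original answer): execute() also accepts a list, or a dict when named placeholders are used. Reusing mycursor and sql from the answer above:

val = ['bane']  # a one-element list works as well
mycursor.execute(sql, val)

# or named placeholders with a dict
mycursor.execute("INSERT INTO biage(kompaniis_saxeli) VALUES (%(name)s)", {'name': 'bane'})
mydb.commit()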

AWS: Unable to insert values in Redshift table

I am trying to insert values into a table within my Redshift cluster. The cluster is connected, since I can read the table, but I can't insert values into it. SELECT statements work fine, but when I try to insert values from a Lambda function, the query is aborted with no error or log information about why.
The query part is like this:
conn = psycopg2.connect(dbname='dev',
                        host='redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
                        port='5439',
                        user='****',
                        password='****%')
cur = conn.cursor()
cur.execute("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")
# print(cur.fetchone())
cur.close()
conn.close()
As I said, there is no log information about why it was aborted, nor do I get any kind of error. When I just use a SELECT statement, it works.
Can anyone guide me through what could be going on?
You forgot to do conn.commit()
conn = psycopg2.connect(dbname='dev',
                        host='redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
                        port='5439',
                        user='****',
                        password='****%')
cur = conn.cursor()
cur.execute("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")
cur.close()
conn.commit()
conn.close()
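An alternative, offered as an aside rather than part of the answer: psycopg2 connections have an autocommit attribute, and enabling it makes every statement commit immediately, which is often convenient in short-lived Lambda handlers. A sketch with the same placeholder credentials:

conn = psycopg2.connect(dbname='dev',
                        host='redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
                        port='5439',
                        user='****',
                        password='****%')
conn.autocommit = True  # each execute() is committed immediately, no explicit commit needed
cur = conn.cursor()
cur.execute("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")
cur.close()
conn.close()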
A slightly improved way to run this:
from contextlib import contextmanager

@contextmanager
def cursor():
    with psycopg2.connect(dbname='dev',
                          host='redshift-cluster-summaries.c0xcgwtgz65l.us-east-2.redshift.amazonaws.com',
                          port='5439',
                          user='****',
                          password='****%') as conn:
        try:
            yield conn.cursor()
        finally:
            try:
                conn.commit()
            except psycopg2.InterfaceError:
                pass

def run_insert(query):
    with cursor() as cur:
        cur.execute(query)
        cur.close()

run_insert("INSERT INTO public.summaries(topic,summary) values('data', 'data_summary');")

Python array to mysql DB

Why can't I save an array (list) to a MySQL DB?
This script works:
########################################
# Importing modules
import mysql.connector

conn = mysql.connector.connect(
    host="localhost",
    user="*******",
    password="*********",
    database="meineTestDB",
)
cursor = conn.cursor()
insert_stmt = (
    "INSERT INTO EMPLOYEE (FIRST_NAME, LAST_NAME)"
    "VALUES (%s, %s)"
)
data = ('Test1', 'Test2')
try:
    # Executing the SQL command
    cursor.execute(insert_stmt, data)
    # Commit your changes in the database
    conn.commit()
except:
    # Rolling back in case of error
    conn.rollback()
print("Data inserted")
# Closing the connection
conn.close()
########################################
But I want to save an array to the MySQL DB.
I tried data = (cars1, cars2) and data = ((cars1), (cars2)), but it doesn't work.
########################################
# Importing modules
import mysql.connector

conn = mysql.connector.connect(
    host="localhost",
    user="*******",
    password="*********",
    database="meineTestDB",
)
cars1 = ["Ford", "Volvo", "BMW"]
cars2 = ["Ford", "Volvo", "BMW"]
cursor = conn.cursor()
insert_stmt = (
    "INSERT INTO EMPLOYEE (FIRST_NAME, LAST_NAME)"
    "VALUES (%s, %s)"
)
data = (cars1, cars2)
try:
    # Executing the SQL command
    cursor.execute(insert_stmt, data)
    # Commit your changes in the database
    conn.commit()
except:
    # Rolling back in case of error
    conn.rollback()
print("Data inserted")
# Closing the connection
conn.close()
########################################
Sorry for this simple question, I'm new to this space. I hope you can help me. Thanks for every answer.
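There is no real answer in the thread, so here is a sketch of the two usual interpretations (table and column names come from the question, and cursor/conn are the ones opened above): if each pair of list entries should become one row, zip the lists and use executemany(); if a whole list should be stored in a single column, serialize it first, for example with json.dumps.

import json

cars1 = ["Ford", "Volvo", "BMW"]
cars2 = ["Ford", "Volvo", "BMW"]

# Option 1: one row per pair of entries
cursor.executemany(
    "INSERT INTO EMPLOYEE (FIRST_NAME, LAST_NAME) VALUES (%s, %s)",
    list(zip(cars1, cars2)),  # [('Ford', 'Ford'), ('Volvo', 'Volvo'), ('BMW', 'BMW')]
)

# Option 2: store each whole list as text in one row
cursor.execute(
    "INSERT INTO EMPLOYEE (FIRST_NAME, LAST_NAME) VALUES (%s, %s)",
    (json.dumps(cars1), json.dumps(cars2)),
)
conn.commit()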
