I can successfully connect to SQL Server from my jupyter notebook with this script:
from sqlalchemy import create_engine
import pyodbc
import csv
import time
import urllib
# URL-encode the raw ODBC connection string so it can be embedded in the
# SQLAlchemy URL below (the odbc_connect= pass-through form requires this).
# NOTE(review): the triple-quoted literal embeds newlines and any leading
# whitespace in the connection string; pyodbc tolerates this, but a
# single-line string would be safer — confirm against the working setup.
params = urllib.parse.quote_plus('''DRIVER={SQL Server Native Client 11.0};
SERVER=SV;
DATABASE=DB;
TRUSTED_CONNECTION=YES;''')
# Engine that hands the pre-quoted ODBC string straight through to pyodbc.
engine = create_engine("mssql+pyodbc:///?odbc_connect=%s" % params)
And I can successfully execute SQL stored procedures without parameters from jupyter notebook with the following function :
def execute_stored_procedure(engine, procedure_name):
    """Run a parameterless stored procedure via the engine's raw connection.

    Returns a dict: {'status': 'OK'} on success, or
    {'status': 'ERROR', 'error': <exception>} on failure.
    The connection is always closed, even when execution fails.
    """
    outcome = {}
    raw_conn = engine.raw_connection()
    try:
        cur = raw_conn.cursor()
        cur.execute("EXEC " + procedure_name)
        cur.close()
        raw_conn.commit()
        outcome['status'] = 'OK'
    except Exception as exc:
        outcome['status'] = 'ERROR'
        outcome['error'] = exc
    finally:
        raw_conn.close()
    return outcome
How could I transform the previous function to handle stored procedures that take several parameters (two in my case)?
Solution of my problem, working only for stored procedures with 0 or 2 parameters (just edit the 10th line if you want another number of parameters):
def execute_stored_procedure(engine, procedure_name, params_dict=None):
    """Execute a SQL Server stored procedure, optionally with named parameters.

    engine         : SQLAlchemy engine (its raw_connection() is used).
    procedure_name : name of the procedure to EXEC.
    params_dict    : optional mapping of parameter name -> value; works for
                     any number of parameters, not just 0 or 2.

    Returns {'status': 'OK'} on success, or
    {'status': 'ERROR', 'error': <exception>} on failure.

    Fixes vs. the original: T-SQL named parameters are prefixed with '@'
    (the original '#' is not valid), and values are passed to the driver
    as '?' bind parameters instead of being string-interpolated, which
    avoids SQL injection and broken quoting for values containing quotes.
    """
    res = {}
    connection = engine.raw_connection()
    try:
        cursor = connection.cursor()
        if params_dict:
            # Build "EXEC proc @name=?, ..." and let the driver bind values.
            placeholders = ", ".join("@{}=?".format(k) for k in params_dict)
            cursor.execute("EXEC " + procedure_name + " " + placeholders,
                           tuple(params_dict.values()))
        else:
            # None or an empty mapping: plain EXEC with no parameter list.
            cursor.execute("EXEC " + procedure_name)
        cursor.close()
        connection.commit()
        res['status'] = 'OK'
    except Exception as e:
        res['status'] = 'ERROR'
        res['error'] = e
    finally:
        connection.close()
    return res
Related
I am trying to execute the mentioned code and I got the following error:
Not able to get the exact cause of the error
2023-02-10 14:44:27,611 ERROR : No results. Previous SQL was not a query.
Traceback (most recent call last):
File "<ipython-input-1-00966ada7c84>", line 30, in <module>
for row in rows:
pyodbc.ProgrammingError: No results. Previous SQL was not a query.
Code:
import pyodbc
import pandas as pd
import sqlalchemy
from arcgis.geocoding import geocode
from arcgis.gis import GIS
import logging
logging.basicConfig(filename="C:\\Users\\addr_errors.log", filemode='w',
                    level=logging.ERROR,
                    format='%(asctime)s %(levelname)s : %(message)s')
try:
    gis = GIS('home')
    # Connect to the SQL Server database (fill in driver/server/database).
    conn = pyodbc.connect(
        "DRIVER=;"
        "SERVER=;"
        "DATABASE=;"
        "Trusted_Connection=yes;")
    # Create a cursor from the connection
    cursor = conn.cursor()
    # Materialize the SELECT result before issuing any UPDATE: reusing the
    # same cursor for an UPDATE while still iterating its SELECT discards
    # the pending result set and raises
    # "No results. Previous SQL was not a query." on the next iteration.
    rows = cursor.execute(
        "SELECT top 1 * FROM [dbo].[Aff_SC] where pflag is null ORDER BY AffiliationID"
    ).fetchall()
    # A cursor object is always truthy, so the original `if not rows` could
    # never fire; testing the fetched list makes the emptiness check real.
    if not rows:
        print("No results found")
    else:
        for row in rows:
            # Get the values from the current row
            address = row.OldAddress
            vaffliationid = row.AffiliationID
            print(address)
            print(vaffliationid)
            # Geocode the address
            result = geocode(address, as_featureset=True)
            if result is not None:
                try:
                    best_match = result.features[0]
                    print(best_match)
                except IndexError:
                    best_match = None
                print(vaffliationid)
                # Parameterized UPDATEs: the original f-string versions broke
                # on addresses containing quotes and were SQL-injectable.
                cursor.execute(
                    "UPDATE [dbo].[Aff_SC] SET pflag = 1 "
                    "WHERE OldAddress = ? and AffiliationID = ?",
                    (address, vaffliationid))
                if best_match is not None:
                    # Get the standardized address
                    standardized_address = best_match.attributes["Match_addr"]
                    print("standardized_address")
                    print(standardized_address)
                    cursor.execute(
                        "UPDATE [dbo].[Aff_SC] SET NewAddress = ? , pflag = 1 "
                        "WHERE OldAddress = ? and AffiliationID = ?",
                        (standardized_address, address, vaffliationid))
    # Commit the changes, then close the cursor and the connection.
    conn.commit()
    cursor.close()
    conn.close()
except Exception as e:
    logging.exception(e)
# Close the log file; overwriting the logfile worked after closing handlers.
logging.shutdown()
handlers = logging.getLogger().handlers[:]
for handler in handlers:
    handler.close()
    logging.getLogger().removeHandler(handler)
I tried running the print statements inside the query blocks and the output looks fine to me.
I want to write some basic code to do querys on read only mode on sqlite databases
These are daily db files so it is important after closing the connections not to leave other files in the server like db-shm or db-wal files associated
I have been reading the documentation and it seems that even though I try to close the connections explicitly, these are not closed so these files stay there
import sqlite3
import pandas as pd
def _connect(f):
    """Open *f* as a read-only SQLite database (URI mode).

    Returns the connection, or None if sqlite3 reported an error
    (the error is printed, mirroring the disconnect helper).
    """
    try:
        return sqlite3.connect("file:" + f + "?mode=ro", uri=True)
    except sqlite3.Error as er:
        print('SQLite error in the db connection: %s' % (' '.join(er.args)))
        return None
def _disconnect(con):
    """Close the given SQLite connection, printing any sqlite3 error.

    Always returns *con* (already closed on success), matching the
    original contract.
    """
    try:
        con.close()
    except sqlite3.Error as er:
        print('SQLite error in the db disconnection: %s' % (' '.join(er.args)))
    return con
def fl_query(file):
    '''Returns information about a db file
    file : string
        absolute path to the db file
    returns
    -------
    list
    '''
    handle = _connect(file)
    cursor = handle.cursor()
    cursor.execute("""SELECT ....etc""")
    # Keep the first six columns of every result row as a tuple.
    info = [(r[0], r[1], r[2], r[3], r[4], r[5]) for r in cursor.fetchall()]
    cursor.close()
    _disconnect(handle)
    return info
# Example driver: point at one day's database file and summarize it.
# NOTE(review): 'folder' is a placeholder — it must end with a path
# separator for the concatenation below to form a valid path.
folder = 'path to the db file'
file = folder+'6100000_2022-09-18.db'
info = fl_query(file)
I have read about how to close databases cleanly, but so far nothing works and the db-shm and db-wal files stay there every time I open a file. Remark: it is a server with thousands of files, so it is important not to create more files.
I realized that I was unable to modify the PRAGMA Journal because I was opening the database in read only mode. In read-write mode it performs the modification of the PRAGMA journal to DELETE mode
import sqlite3
import pandas as pd
def _connect(f):
    """Open *f* as a read-write SQLite database (URI mode).

    Read-write access is needed so the caller can change PRAGMA
    journal_mode. Returns the connection, or None on sqlite3 error
    (which is printed).
    """
    try:
        return sqlite3.connect("file:" + f + "?mode=rw", uri=True)
    except sqlite3.Error as er:
        print('SQLite error in the db connection: %s' % (' '.join(er.args)))
        return None
def _disconnect(con):
    """Close the given SQLite connection, printing any sqlite3 error.

    Always returns *con* (already closed on success), matching the
    original contract.
    """
    try:
        con.close()
    except sqlite3.Error as er:
        print('SQLite error in the db disconnection: %s' % (' '.join(er.args)))
    return con
def fl_query(file):
    '''Returns information about a db file
    file : string
        absolute path to the db file
    returns
    -------
    list
    '''
    cn = _connect(file)
    cur = cn.cursor()
    # DELETE journal mode removes the -shm/-wal side files when the
    # connection closes (requires a read-write connection, hence mode=rw).
    cur.execute("PRAGMA journal_mode=DELETE")
    query = """SELECT ....etc"""
    cur.execute(query)
    info = [(row[0], row[1], row[2], row[3], row[4], row[5]) for row in cur.fetchall()]
    cur.close()
    _disconnect(cn)
    # Bug fix: the original built `info` but never returned it, so callers
    # always received None despite the docstring promising a list.
    return info
This is nothing complicated, but I am not sure why it is not working:
import mysql.connector
def get_connection(host, user, password, db_name):
    """Open a MySQL connection with utf8 collation.

    Prints 'Connected' on success; on any failure the error is printed
    and None is returned (no exception escapes this function).
    """
    conn = None
    try:
        conn = mysql.connector.connect(
            host=host,
            user=user,
            use_unicode=True,
            password=password,
            database=db_name
        )
        conn.set_charset_collation('utf8')
        print('Connected')
    except Exception as ex:
        print(str(ex))
    # Plain return instead of the original `finally: return` — same result
    # on every path, without suppressing hypothetical non-Exception exits.
    return conn
# NOTE(review): `connection` and `table_name` are assumed to be defined
# earlier (e.g. connection = get_connection(...)) — confirm against caller.
# The indentation below was lost in the paste; the four lines after `with`
# presumably belong inside the with-block.
with connection.cursor() as cursor:
sql = 'UPDATE {} set underlying_price=9'.format(table_name)
cursor.execute(sql)
connection.commit()
print('No of Rows Updated ...', cursor.rowcount)
It always returns 0 no matter what. The same query shows correct count on TablePlus
The MySQL API provides this method, but I do not know how to call it, since calling it on the connection variable gives an error.
I am not sure why your code does not work. But i am using pymysql, and it works
import os
import pandas as pd
from types import SimpleNamespace
from sqlalchemy import create_engine
import pymysql
# Database connection settings bundled into a single namespace object,
# built in one call instead of attribute-by-attribute assignment.
PARAM = SimpleNamespace(
    DB_user='yourname',
    DB_password='yourpassword',
    DB_name='world',
    DB_ip='localhost',
)
def get_DB_engine_con(PARAM):
    """Open a pymysql connection from PARAM and return (cursor, connection)."""
    ## engine = create_engine("mysql+pymysql://{user}:{pw}@{ip}/{db}".format(
    ##     user=PARAM.DB_user, pw=PARAM.DB_password, db=PARAM.DB_name, ip=PARAM.DB_ip))
    conn = pymysql.connect(
        host=PARAM.DB_ip,
        user=PARAM.DB_user,
        passwd=PARAM.DB_password,
        db=PARAM.DB_name,
    )
    return conn.cursor(), conn  ## , engine
# Open the cursor/connection pair once at import time using PARAM above.
cur, conn = get_DB_engine_con(PARAM)
and my data
if i run the code
# Bulk-update every row's CountryCode, persist, then report the count.
# (f-string produces the same SQL text as the original .format() call.)
table_name = 'ct2'
sql = f"UPDATE {table_name} set CountryCode='NL' "
cur.execute(sql)
conn.commit()
print('No of Rows Updated ...', cur.rowcount)
the result No of Rows Updated ... 10 is printed. and the NLD is changed to NL
If using mysql.connector
# Same update, but through mysql.connector instead of pymysql.
# NOTE(review): PARAM is assumed to be the SimpleNamespace defined earlier.
import mysql.connector
connection = mysql.connector.connect(
host=PARAM.DB_ip,
user=PARAM.DB_user,
use_unicode=True,
password=PARAM.DB_password,
database=PARAM.DB_name
)
cur = connection.cursor()
table_name='ct2'
sql = "UPDATE {} set CountryCode='NL2' ".format(table_name)
cur.execute(sql)
# rowcount is valid right after execute() for an UPDATE; committing after
# printing does not change the reported count.
print('No of Rows Updated ...', cur.rowcount)
connection.commit()
it still works
The country code is updated to NL2 and "No of Rows Updated ... 10" is printed. The second time I run it, "No of Rows Updated ... 0" is printed, since nothing is left to change.
Not sure why it does not work on your machine.
I have a problem with creating SQL query for Oracle database using Python.
I want to bind string variable and it does not work, could you tell me what am I doing wrong?
This is my code:
import cx_Oracle
# Document numbers read from changedNamed.txt (filled in by CheckData()).
dokList = []
def LoadDatabase():
    """Count DOCUMENT rows matching each entry of the module-level dokList.

    Fixes vs. the original:
      * The bind placeholder in the SQL must match the dict key passed to
        execute() — the original used ':param' with {'doknr': ...}, so the
        value was never bound; both now use 'doknr'.
      * cx_Oracle's Cursor.rowcount reports rows *fetched so far* for a
        SELECT, so the rows are fetched before printing the count
        (otherwise it prints 0 regardless of matches).
    """
    conn = None
    cursor = None
    try:
        conn = cx_Oracle.connect("login", "password", "localhost")
        cursor = conn.cursor()
        query = "SELECT * FROM DOCUMENT WHERE DOC = :doknr"
        for doknumber in dokList:
            cursor.execute(query, {'doknr': doknumber})
            cursor.fetchall()
            print(cursor.rowcount)
    except cx_Oracle.DatabaseError as err:
        print(err)
    finally:
        if cursor:
            cursor.close()
        if conn:
            conn.close()
def CheckData():
    """Read document numbers (one per line) from changedNamed.txt into dokList.

    Fix vs. the original: readlines() keeps the trailing newline, so each
    appended value carried a newline character and could never match the
    DOC column in the query; values are now stripped and blank lines are
    skipped.
    """
    with open('changedNamed.txt') as f:
        for line in f:
            doknr = line.strip()
            if doknr:
                dokList.append(doknr)
# Populate dokList from the file, then run the per-document queries.
CheckData()
LoadDatabase()
The output of cursor.rowcount is 0 but it should be number greater than 0.
You're using a dictionary ({'doknr' : doknumber}) for your parameter, so it's a named parameter - the :param needs to match the key name. Try this:
# The bind name in the SQL (:doknr) now matches the dict key passed to
# execute(). `cursor` and `dokList` are assumed defined as in the question.
query = "SELECT * FROM DOCUMENT WHERE DOC = :doknr"
for doknumber in dokList:
cursor.execute(query, {'doknr':doknumber})
print(cursor.rowcount)
For future troubleshooting, to check whether your parameter is getting passed properly, you can also try changing your query to "select :param from dual".
For a specific use case - in which I have 100 databases and 1 database is the central database, now my app connects to that one central database which spawns connections to any of the 100 databases as per the request of the user to run some query on any of them.
In this case does using DRCP makes same as I dont want the connection to be killed if the user is running the query at the same time I dont want too many connections to be opened to the db which I control by creating a profile on the database which limits the number of active sessions to some low number say 5 for that specific user(read_only_user) using that specific profile(read_only_profile).
Right now I am using the standard open a connection per request model. But Im not sure if thats the best way to go about it.
import cx_Oracle
import logging, time
class Database(object):
    """Thin cx_Oracle wrapper holding one connection and one cursor.

    Fixes vs. the original:
      * Python 3 ``except ... as exc`` syntax — the old ``except X, exc``
        form does not compile under Python 3 (the rest of this file uses
        Python 3 features such as f-strings).
      * The easy-connect string joins credentials and DSN with '@'
        (user/password@dsn), not '#'.
      * update_output() binds job_id instead of %-formatting it into SQL.
    """

    def __init__(self, user, password, host, port, service_name, mode, *args):
        # mode should be 0 if not cx_Oracle.SYSDBA
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.service_name = service_name
        self.logger = logging.getLogger(__name__)
        # Default to a normal session when no mode is supplied (the original
        # guarded this with a try/except that could never fire).
        self.mode = mode if mode is not None else 0
        self.connection = None
        dsn = cx_Oracle.makedsn(self.host, self.port, self.service_name)
        # Oracle easy-connect syntax: user/password@dsn.
        self.connect_string = self.user + '/' + self.password + '@' + dsn
        try:
            self.connection = cx_Oracle.connect(self.connect_string,
                                                mode=self.mode,
                                                threaded=True)
            self.connection.stmtcachesize = 1000
            self.connection.client_identifier = 'my_app'
            self.cursor = self.connection.cursor()
            self.idVar = self.cursor.var(cx_Oracle.NUMBER)
        except cx_Oracle.DatabaseError as exc:
            error, = exc.args
            self.logger.exception(
                'Exception occured while trying to create database object : %s',
                error.message)
            raise

    def query(self, q):
        """Run a SELECT; return all rows, or the error message on failure."""
        try:
            self.cursor.execute(q)
            return self.cursor.fetchall()
        except cx_Oracle.DatabaseError as exc:
            error, = exc.args
            self.logger.info(
                "Error occured while trying to run query: %s, error : %s", q,
                error.message)
            return error.message

    def dml_query(self, sql):
        """Execute and commit a DML statement; return 1 on success, 0 on failure."""
        try:
            self.cursor.execute(sql)
            self.connection.commit()
            return 1
        except Exception as e:
            self.logger.exception(e)
            return 0

    def dml_query_with_data(self, sql, data):
        """
        Use this method to for DML SQLS :
        Inputs - Sql to be executed. Data related to that sql
        Returns - 1 on success, 0 on failure.
        """
        try:
            self.cursor.execute(sql, data)
            self.connection.commit()
            return 1
        except Exception as e:
            self.logger.exception(e)
            return 0

    def update_output(self, clob, job_id, flag):
        """Assign or append job output in my_table; return rows updated (0 on error)."""
        try:
            # Bind job_id rather than %-formatting it into the SQL text.
            self.cursor.execute(
                "Select output from my_table where job_id=:p_key",
                p_key=job_id)
            output = self.cursor.fetchall()
            # Checking if we already have some output in the clob for that job_id
            if output[0][0] is None:
                status = "COMPLETED" if flag == 1 else "FAILED"
                self.cursor.execute("""UPDATE my_table
                        SET OUTPUT = :p_clob
                        ,job_status=:status WHERE job_id = :p_key""",
                        p_clob=clob, status=status, p_key=job_id)
            else:
                self.cursor.execute("""UPDATE my_table
                        SET OUTPUT = OUTPUT || ',' || :p_clob
                        WHERE job_id = :p_key""", p_clob=clob, p_key=job_id)
            self.connection.commit()
            rows_updated = self.cursor.rowcount
            return rows_updated
        except Exception as e:
            self.logger.exception(e)
            return 0

    def __del__(self):
        # Best-effort close; never let an exception escape a destructor.
        try:
            if self.connection is not None:
                self.connection.close()
        except Exception as e:
            self.logger.exception(
                "Exception while trying to close database connection object : %s", e)

# Example usage:
# if __name__ == '__main__':
#     db = Database('test', 'test', 'my_host', '1000', 'my_db', 0)
#     columns = db.query('select * from my-table')
#     print(columns)
This is my database class, and I create an object whenever I need a connect to the DB. And the init and del method take care of constructing and destructing the object.
Should I be using DRCP/ sessionPool to improve performance.
What if there are too many users waiting coz all the connections in DRCP are taken?
Can I have sessionPool per database (for the 100 databases, each database can take atmost 5 connections at a time for that read_only_user)