I'm trying to open a cursor to a MySQL database, but I'm getting this error:
'NoneType' object has no attribute 'cursor'
Here is a small piece of the source code:
class Sample:
    def __init__(self):
        self.conn = None
        self.value = self.setValue()

    def connect(self):
        self.conn = MySQLdb.connect(...)
        #cursor = self.conn.cursor()
        #cursor.execute("SELECT ...")
        #value = str(cursor.fetchone()[0])
        #raise Exception(value)
        #cursor.close() <- here everything is working fine

    def setValue(self):
        if (self.conn == None):
            self.connect()
        #raise Exception(self.conn.open)
        cursor = self.conn.cursor() # ERROR: 'NoneType' object has no attribute 'cursor'
        ...
If I enable the commented-out exception, I get a 1, so the connection is open.
And if I do the cursor creation and the SQL statement inside the connect function, everything works fine.
The strange thing is, everything looks correct, and for some other connections using the same functions everything works fine, too. I don't know how to solve this error. I hope someone can point me in the right direction.
I would change the statement that checks whether the connection is open so that it tests both whether conn is None and whether the connection is actually open. And because you always execute the setValue function, I would recommend calling connect inside the __init__ function.
class Sample:
    conn = None

    def __init__(self):
        self.connect()
        self.value = self.setValue()
        self.close()

    def connect(self):
        self.conn = MySQLdb.connect(...)

    def close(self):
        if self.conn:
            self.conn.close()

    def setValue(self):
        if not self.conn or not self.conn.open:
            self.connect()
        cursor = self.conn.cursor()
Also, remember that with the Python MySQL Connector you need to call commit after you execute an insert or update statement.
cur = self.conn.cursor()
cur.execute("...")
self.conn.commit()
There wasn't any problem connecting SQL Server to Streamlit. But since I changed the SQL Server IP address, I am suddenly having trouble with the connection.
The error from Streamlit is shown below. (I covered up the table name.)
DatabaseError: Execution failed on sql ' SELECT TOP (100) [SEQ] ,[MCNO] ,[STM] ,[PGNM] ,[MATNR] ,[TBFG] ,[GAMNG] ,[SENDFG] ,[CRTM] ,[PGDAT] FROM Table name is confidential WITH(NOLOCK) ': (208, b"Invalid object name 'Table name is confidential'.DB-Lib error message 20018, severity 16:\nGeneral SQL Server error: Check messages from the SQL Server\n")
Also, I have been using pymssql to connect to SQL Server, as shown below.
class DB:
    def __init__(self, host, port, db, user, pwd) -> None:
        self.conn = None
        self.cursor = None
        self.init(host, port, db, user, pwd)

    def __enter__(self):
        return self

    def execute(self, query):
        self.cursor.execute(query)

    def commit(self):
        self.conn.commit()

    def fetchall(self):
        return self.cursor.fetchall()

    def read_sql(self, query):
        return pd.read_sql(query, self.conn)

    def init(self, host, port, db, user, pwd):
        self.conn = pymssql.connect(
            server=host, database=db, user=user, password=pwd, port=port, charset="utf8"
        )
        self.cursor = self.conn.cursor()

    def close(self):
        self.conn.close()
        self.cursor = None

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
Can anyone help me with this problem?
Thank you!
I'm working on a Python program with functionality such as inserting and retrieving values from a Postgres database using psycopg2. The issue is that every time I want to create a query I have to connect to the database, so the following code snippet is present multiple times throughout the file:
# Instantiate Connection
try:
    conn = psycopg2.connect(
        user=userName,
        password=passwrd,
        host=hostAddr,
        database=dbName
    )
    # Instantiate Cursor
    cur = conn.cursor()
    return cur
except psycopg2.Error as e:
    print(f"Error connecting to Postgres Platform: {e}")
    sys.exit(1)
My questions are:
Is there a way I could just create a method to call every time I wish to connect to the database? I've tried creating one, but I get a bunch of errors since the variables cur and conn are not global.
Could I just connect to the database once at the beginning of the program and keep the connection open for the entire time the program is running? This seems like the easiest option, but I am not sure if it would be bad practice (for reference, the program will be running 24/7, so I assumed it would be better to only connect when a query is being made).
Thanks for the help.
You could wrap your own database handling class in a context manager, so you can manage the connections in a single place:
import psycopg2
import traceback
from psycopg2.extras import RealDictCursor

class Postgres(object):
    def __init__(self, *args, **kwargs):
        self.dbName = args[0] if len(args) > 0 else 'prod'
        self.args = args

    def _connect(self, msg=None):
        if self.dbName == 'dev':
            dsn = 'host=127.0.0.1 port=5556 user=xyz password=xyz dbname=development'
        else:
            dsn = 'host=127.0.0.1 port=5557 user=xyz password=xyz dbname=production'
        try:
            self.con = psycopg2.connect(dsn)
            self.cur = self.con.cursor(cursor_factory=RealDictCursor)
        except:
            traceback.print_exc()

    def __enter__(self, *args, **kwargs):
        self._connect()
        return (self.con, self.cur)

    def __exit__(self, *args):
        for c in ('cur', 'con'):
            try:
                obj = getattr(self, c)
                obj.close()
            except:
                pass  # handle it silently!?
        self.args, self.dbName = None, None
Usage:
with Postgres('dev') as (con, cur):
    print(con)
    cur.execute('select 1+1')
    print(cur.fetchall())
print(con)  # verify connection gets closed!
Out:
<connection object at 0x109c665d0; dsn: '...', closed: 0>
[RealDictRow([('sum', 2)])]
<connection object at 0x109c665d0; dsn: '...', closed: 1>
It shouldn't be too bad to keep a connection open. The server itself should be responsible for closing connections it thinks have been around for too long or that are too inactive. We then just need to make our code resilient in case the server has closed the connection:
import psycopg2

CONN = None

def create_or_get_connection():
    global CONN
    if CONN is None or CONN.closed:
        CONN = psycopg2.connect(...)
    return CONN
I have been down this road lots of times before, and you may be reinventing the wheel. I would highly recommend you use an ORM like Django if you need to interact with a database - it handles all this stuff for you using best practices. It is some learning up front, but I promise it pays off.
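To give a feel for what "it handles all this stuff for you" means in practice, here is a rough, hypothetical sketch of the same kind of work done through the Django ORM (the model and field names are made up, and it assumes a configured Django project with DATABASES set up):

# models.py of a hypothetical Django app; Django opens, reuses and closes
# database connections for you based on settings.DATABASES.
from django.db import models

class Reading(models.Model):
    name = models.CharField(max_length=100)
    value = models.FloatField()

# Anywhere else in the project: no explicit connect(), cursor() or close().
Reading.objects.create(name="temperature", value=21.5)
latest = Reading.objects.order_by("-id").first()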
If you don't want to use Django, you can use this code to get or create the connection, and the cursor's context manager to avoid errors with unclosed cursors:
import psycopg2

CONN = None

def create_or_get_connection():
    global CONN
    if CONN is None or CONN.closed:
        CONN = psycopg2.connect(...)
    return CONN

def run_sql(sql):
    conn = create_or_get_connection()
    with conn.cursor() as curs:
        return curs.execute(sql)
This will allow you to run SQL statements directly against the DB without worrying about connection or cursor issues.
If I wrap your code-fragment into a function definition, I don't get "a bunch of errors since variables cur and conn are not global". Why would they need to be global? Whatever the error was, you removed it from your code fragment before posting it.
Your try-catch doesn't make any sense to me. Catching an error just to hide the calling site and then bail out seems like the opposite of helpful.
When to connect depends on how you structure your transactions, how often you do them, and what you want to do if your database ever restarts in the middle of a program execution.
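As an illustration of that last point (my sketch, not part of the answer above): if the database might restart mid-run, one option is to catch the connection error, rebuild the connection once, and retry, reusing the create_or_get_connection() helper shown earlier:

import psycopg2

def run_query(sql, params=None):
    global CONN
    # First attempt on whatever connection we currently hold.
    conn = create_or_get_connection()
    try:
        with conn, conn.cursor() as cur:
            cur.execute(sql, params)
            return cur.fetchall()
    except psycopg2.OperationalError:
        # The server probably restarted: forget the old connection and retry once.
        CONN = None
        conn = create_or_get_connection()
        with conn, conn.cursor() as cur:
            cur.execute(sql, params)
            return cur.fetchall()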
My question is: what is the best way to maintain a single database connection across the entire application? Using the singleton pattern? How?
Conditions that are needed to be taken care of:
In case of multiple requests, I should be using the same connection
In case connection is closed, create a new connection
If the connection has timed-out, on new request my code should create a new connection.
The driver for my database is not supported by the Django ORM, and due to the same driver-related issues I am using pyodbc to connect to the database. Right now I have the class below for creating and managing DB connections:
class DBConnection(object):
    def __init__(self, driver, server,
                 database, user, password):
        self.driver = driver
        self.server = server
        self.database = database
        self.user = user
        self.password = password

    def __enter__(self):
        self.dbconn = pyodbc.connect("DRIVER={};".format(self.driver) +
                                     "SERVER={};".format(self.server) +
                                     "DATABASE={};".format(self.database) +
                                     "UID={};".format(self.user) +
                                     "PWD={};".format(self.password) +
                                     "CHARSET=UTF8",
                                     ansi=True)
        return self.dbconn

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.dbconn.close()
But the issue with this approach is that it will create a new database connection for each query. What would be a better way to do this following the singleton pattern? The way I can think of is to hold a reference to the connection and only create a new one if the existing connection is closed. Something like:
def get_database_connection():
    conn = DBConnection.connection
    if not conn:
        conn = DBConnection.connection = DBConnection.create_connection()
    return conn
What will be the best way to achieve this? Any suggestion/ideas/examples?
PS: I was looking at weakref, which allows creating weak references to objects. I think it would be a good idea to use weakref along with the singleton pattern for storing the connection variable. That way I won't have to keep the connection alive when the DB is not in use. What do you guys say about this?
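To make the PS concrete, here is a rough sketch (all names here are made up) of how a weak reference could be combined with the singleton idea; the connection only stays alive while some caller still holds the wrapper object:

import weakref

class ConnectionHolder(object):
    """Thin wrapper that owns the real DB connection."""
    def __init__(self, conn):
        self.conn = conn

    def __del__(self):
        try:
            self.conn.close()
        except Exception:
            pass

class WeakDBConnection(object):
    _holder_ref = None  # weak reference to the current ConnectionHolder, if any

    @classmethod
    def get_holder(cls, connect):
        holder = cls._holder_ref() if cls._holder_ref is not None else None
        if holder is None:
            holder = ConnectionHolder(connect())  # connect() builds e.g. a pyodbc connection
            cls._holder_ref = weakref.ref(holder)
        return holder

Callers have to keep the returned holder referenced while they use it; once the last strong reference is dropped, the holder (and its connection) can be garbage-collected.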
For now, I am going ahead with the singleton class approach. Anyone seeing potential flaws in this, feel free to mention them :)
DBConnector class for creating a connection
class DBConnector(object):
    def __init__(self, driver, server, database, user, password):
        self.driver = driver
        self.server = server
        self.database = database
        self.user = user
        self.password = password
        self.dbconn = None

    # creates a new connection
    def create_connection(self):
        return pyodbc.connect("DRIVER={};".format(self.driver) +
                              "SERVER={};".format(self.server) +
                              "DATABASE={};".format(self.database) +
                              "UID={};".format(self.user) +
                              "PWD={};".format(self.password) +
                              "CHARSET=UTF8",
                              ansi=True)

    # for explicitly opening a database connection
    def __enter__(self):
        self.dbconn = self.create_connection()
        return self.dbconn

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.dbconn.close()
DBConnection class for managing the connections
class DBConnection(object):
    connection = None

    @classmethod
    def get_connection(cls, new=False):
        """Create and return a singleton database connection."""
        if new or not cls.connection:
            # pass your driver/server/database/user/password to DBConnector here
            cls.connection = DBConnector().create_connection()
        return cls.connection

    @classmethod
    def execute_query(cls, query):
        """Execute a query on the singleton DB connection."""
        connection = cls.get_connection()
        try:
            cursor = connection.cursor()
        except pyodbc.ProgrammingError:
            connection = cls.get_connection(new=True)  # create a new connection
            cursor = connection.cursor()
        cursor.execute(query)
        result = cursor.fetchall()
        cursor.close()
        return result
class DBConnector(object):
    def __new__(cls):
        if not hasattr(cls, 'instance'):
            cls.instance = super(DBConnector, cls).__new__(cls)
        return cls.instance

    def __init__(self):
        # your db connection code goes in the constructor
        pass

con = DBConnector()
con1 = DBConnector()
con is con1  # output is True
Hope the above code is helpful.
class MyAddon(pyxbmct.AddonDialogWindow):
    def __init__(self, title=''):
        super(MyAddon, self).__init__(title)
        self.mysql_connect()
        self.populate()

    def populate(self):
        categories = self.read_data()

    def read_data(self):
        query = ("SELECT category FROM test")
        cursor = connection.cursor()
        categories = cursor.execute(query)
        return categories

    def mysql_connect(self):
        global connection
        try:
            connection = mysql.connector.connect(**config).cursor()
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                xbmc.executebuiltin('Notification(Error!, Bad user name of password)')
            elif err.errno == errorcode.ER_BAD_DB_ERROR:
                xbmc.executebuiltin('Notification(Error!, Database does not exist)')
            else:
                xbmc.executebuiltin('Notification(Error!, {0})'.format(err))
I'm developing a Python add-on for Kodi. I get a Global name 'connection' is not defined error when trying to use a global variable for the database connection. I cannot read the global variable connection from the read_data function. I'm sure this is not a forward reference problem because I tested it that way.
The purpose of using a global variable for the connection is to reuse the connection in all the functions without creating a new connection every time.
It may be that Kodi does something funky with namespaces or your instances are pickled; when unpickled the global will be gone. Another problem with a global like this is that the connection might be lost at some point.
I'd restructure the code to have a method that returns a connection, and use that in all methods that require a connection. Make the connection method a classmethod and allow for the connection to be gone:
class MyAddonConnectionFailed(Exception): pass

def read_data(self):
    query = ("SELECT category FROM test")
    try:
        conn = self.connect()
    except MyAddonConnectionFailed:
        # connection failed; error message already displayed
        return []
    cursor = conn.cursor()
    categories = cursor.execute(query)
    return categories

_connection = None

@classmethod
def connect(cls):
    if cls._connection and cls._connection.is_connected():
        return cls._connection
    try:
        cls._connection = mysql.connector.connect(**config)
        return cls._connection
    except mysql.connector.Error as err:
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            xbmc.executebuiltin('Notification(Error!, Bad user name or password)')
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            xbmc.executebuiltin('Notification(Error!, Database does not exist)')
        else:
            xbmc.executebuiltin('Notification(Error!, {0})'.format(err))
        raise MyAddonConnectionFailed
I'm raising an exception in the connect classmethod; you'll need to decide how you want to handle the case where your database is misconfigured or can't connect. Displaying an error message is not enough. You could still call self.connect() from the __init__ method to signal this problem early of course.
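For instance, a minimal sketch (mine, not from the answer above) of calling connect() from __init__ so a misconfigured database is reported right away:

def __init__(self, title=''):
    super(MyAddon, self).__init__(title)
    try:
        self.connect()  # fail fast if the database is unreachable or misconfigured
    except MyAddonConnectionFailed:
        # decide what the add-on should do here: close the window, retry, etc.
        raise
    self.populate()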
Using the code below leaves me with an open connection; how do I close it?
import pyodbc
conn = pyodbc.connect('DRIVER=MySQL ODBC 5.1 driver;SERVER=localhost;DATABASE=spt;UID=who;PWD=testest')
csr = conn.cursor()
csr.close()
del csr
Connections have a close method as specified in PEP-249 (Python Database API Specification v2.0):
import pyodbc
conn = pyodbc.connect('DRIVER=MySQL ODBC 5.1 driver;SERVER=localhost;DATABASE=spt;UID=who;PWD=testest')
csr = conn.cursor()
csr.close()
conn.close() #<--- Close the connection
Since the pyodbc connection and cursor are both context managers, nowadays it would be more convenient (and preferable) to write this as:
import pyodbc
conn = pyodbc.connect('DRIVER=MySQL ODBC 5.1 driver;SERVER=localhost;DATABASE=spt;UID=who;PWD=testest')
with conn:
    crs = conn.cursor()
    do_stuff
    # conn.commit() will automatically be called when Python leaves the outer `with` statement
    # Neither crs.close() nor conn.close() will be called upon leaving the `with` statement!!
See https://github.com/mkleehammer/pyodbc/issues/43 for an explanation for why conn.close() is not called.
Note that unlike the original code, this causes conn.commit() to be called. Use the outer with statement to control when you want commit to be called.
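For example, one way to arrange this (a sketch; the connection string, table and column are placeholders) is to keep plain reads outside the transaction block and wrap only the statements you want committed together:

import pyodbc

conn = pyodbc.connect('DRIVER=...;SERVER=...;DATABASE=...;UID=...;PWD=...')

# Plain reads: nothing needs to be committed.
crs = conn.cursor()
crs.execute("SELECT 1")
print(crs.fetchall())

# Writes: only this block is committed, at the successful exit of `with conn:`.
with conn:
    crs = conn.cursor()
    crs.execute("UPDATE some_table SET some_col = 1")  # hypothetical table/column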
Also note that regardless of whether or not you use the with statements, per the docs,
Connections are automatically closed when they are deleted (typically when they go out of scope) so you should not normally need to call [conn.close()], but you can explicitly close the connection if you wish.
and similarly for cursors (my emphasis):
Cursors are closed automatically when they are deleted (typically when they go out of scope), so calling [csr.close()] is not usually necessary.
You can wrap the whole connection in a context manager, like the following:
from contextlib import contextmanager
import pyodbc
import sys

@contextmanager
def open_db_connection(connection_string, commit=False):
    connection = pyodbc.connect(connection_string)
    cursor = connection.cursor()
    try:
        yield cursor
    except pyodbc.DatabaseError as err:
        error, = err.args
        sys.stderr.write(error.message)
        cursor.execute("ROLLBACK")
        raise err
    else:
        if commit:
            cursor.execute("COMMIT")
        else:
            cursor.execute("ROLLBACK")
    finally:
        connection.close()
Then do something like this where ever you need a database connection:
with open_db_connection("...") as cursor:
# Your code here
The connection will close when you leave the with block. This will also rollback the transaction if an exception occurs or if you didn't open the block using with open_db_connection("...", commit=True).
You might try turning off pooling, which is enabled by default. See this discussion for more information.
import pyodbc
pyodbc.pooling = False
conn = pyodbc.connect('DRIVER=MySQL ODBC 5.1 driver;SERVER=localhost;DATABASE=spt;UID=who;PWD=testest')
csr = conn.cursor()
csr.close()
del csr
You can define a DB class as below. Also, as andrewf suggested, use a context manager for cursor access. I'd define it as a member function.
This way it keeps the connection open across multiple transactions from the app code and saves unnecessary reconnections to the server.
from contextlib import contextmanager
import pyodbc

class MS_DB():
    """ Collection of helper methods to query the MS SQL Server database.
    """
    def __init__(self, username, password, host, port=1433, initial_db='dev_db'):
        self.username = username
        self._password = password
        self.host = host
        self.port = str(port)
        self.db = initial_db
        conn_str = 'DRIVER=ODBC Driver 13 for SQL Server;SERVER=' + \
                   self.host + ';PORT=' + self.port + ';DATABASE=' + \
                   self.db + ';UID=' + self.username + ';PWD=' + \
                   self._password + ';'
        print('Connected to DB:', conn_str)
        self._connection = pyodbc.connect(conn_str)
        pyodbc.pooling = False

    def __repr__(self):
        return f"MS-SQLServer('{self.username}', <password hidden>, '{self.host}', '{self.port}', '{self.db}')"

    def __str__(self):
        return f"MS-SQLServer Module for STP on {self.host}"

    def __del__(self):
        self._connection.close()
        print("Connection closed.")

    @contextmanager
    def cursor(self, commit: bool = False):
        """
        A context manager style of using a DB cursor for database operations.
        This function should be used for any database queries or operations that
        need to be done.

        :param commit:
            A boolean value that says whether to commit any database changes to the database. Defaults to False.
        :type commit: bool
        """
        cursor = self._connection.cursor()
        try:
            yield cursor
        except pyodbc.DatabaseError as err:
            print("DatabaseError {} ".format(err))
            cursor.rollback()
            raise err
        else:
            if commit:
                cursor.commit()
        finally:
            cursor.close()

ms_db = MS_DB(username='my_user', password='my_secret', host='hostname')
with ms_db.cursor() as cursor:
    cursor.execute("SELECT @@version;")
    print(cursor.fetchall())
According to pyodbc documentation, connections to the SQL server are not closed by default. Some database drivers do not close connections when close() is called in order to save round-trips to the server.
To close your connection when you call close() you should set pooling to False:
import pyodbc
pyodbc.pooling = False
The most common way to handle connections, if the language does not have a self-closing construct like using in .NET, is to use a try -> finally to close the objects. It's possible that pyodbc does have some form of automatic closing, but here is the code I use just in case:
conn = cursor = None
try:
    conn = pyodbc.connect('DRIVER=MySQL ODBC 5.1 driver;SERVER=localhost;DATABASE=spt;UID=who;PWD=testest')
    cursor = conn.cursor()
    # ... do stuff ...
finally:
    try: cursor.close()
    except: pass
    try: conn.close()
    except: pass