Selecting a specific MySQL connection from multiple connections - Python

Code:

@app.before_request
def before_request():
    try:
        g.db = mysql.connector.connect(user=user1, password=pass1, host=host1,
                                       database=testdb1)
        g.db1 = mysql.connector.connect(user=user2, password=pass2, host=host2,
                                        database=testdb2)
    except mysql.connector.Error:
        pass  # error handling elided

@app.route('/user/<db>', methods=['POST'])
def insert(db):
    body = request.json
    # HTTP request for inserting data into testdb1:
    #   curl 'http://localhost:5000/user/testdb1'   (testdb1 is the first database)
    # Likewise, for inserting data into testdb2:
    #   curl 'http://localhost:5000/user/testdb2'   (testdb2 is the second database)
    # ... code that inserts the data into the database (testdb1/testdb2)
    #     according to the HTTP request ...
Everything is working fine, but the problem is that when I send an HTTP request, say to insert data into testdb1, connections to both databases get opened. I want only the requested database connection to be opened, not both.
Questions:
1. What changes do I need to make so that only one connection gets opened?
2. Is it possible to have g.db as a single connection variable that covers both databases, so that when an HTTP request comes in for a database, say testdb1, only that connection gets opened?

You should do it like this: when you start your server, set up two global connections that are only created on first use, for example:

conn1 = None
conn2 = None

def connection1():
    global conn1
    if conn1 is None:
        conn1 = ...  # open the connection to the first database here
    return conn1

When you want to run SQL:

from initConnection import connection1, connection2

connection1().execute(sql)  # or connection2().execute(sql)

Only the connection you actually use (connection1 or connection2) gets opened.
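Applied to the Flask setup in the question, the same lazy idea could look roughly like the sketch below; the DB_SETTINGS mapping, the users table, and the name column are made up for illustration.

import mysql.connector
from flask import Flask, g, request, jsonify

app = Flask(__name__)

# Hypothetical mapping from the <db> URL segment to connection settings.
DB_SETTINGS = {
    'testdb1': dict(user='user1', password='pass1', host='host1', database='testdb1'),
    'testdb2': dict(user='user2', password='pass2', host='host2', database='testdb2'),
}

def get_connection(db_name):
    # Open (and cache on g) only the connection this request asks for.
    if 'db_conn' not in g:
        g.db_conn = mysql.connector.connect(**DB_SETTINGS[db_name])
    return g.db_conn

@app.route('/user/<db>', methods=['POST'])
def insert(db):
    body = request.json
    conn = get_connection(db)  # only the requested database gets a connection
    cur = conn.cursor()
    cur.execute("INSERT INTO users (name) VALUES (%s)", (body['name'],))
    conn.commit()
    cur.close()
    return jsonify({'inserted_into': db})

@app.teardown_appcontext
def close_connection(exc):
    conn = g.pop('db_conn', None)
    if conn is not None:
        conn.close()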

Related

How to yield a db connection in a python sqlalchemy function similar to how it is done in FastAPI?

In FastAPI I had the following function that I used to open and close a DB session:
def get_db():
    try:
        db = SessionLocal()
        yield db
    finally:
        db.close()
And within the routes of my API I would do something like this:

@router.get("/")
async def read_all_events(user: dict = Depends(get_current_user), db: Session = Depends(get_db)):
    logger.info("API read_all_events")
    if user is None:
        raise http_user_credentials_not_valid_exception()
    return db.query(models.Events).all()

You can see that I am injecting the session into the API call.
So now I want to do something similar within a plain Python function:

def do_something():
    # get person data from database
    # play with person data
    # save new person data in database
    # get cars data from database
    ...

So I am wondering whether I should use the same approach as in FastAPI (I do not know how), or whether I should just open and close the connection manually like this:
def do_something():
    try:
        db = SessionLocal()
        yield db
        # get person data from database
        # play with person data
        # save new person data in database
        # get cars data from database
    finally:
        db.close()

Thanks
The usage of yield in this case is so that Depends(get_db) returns the db session instance, so that it can be used in the FastAPI route, and as soon as the FastAPI route returns the response to the user, the finally clause (db.close()) is executed. This is good because every request uses a separate db session, and db connections are closed after every route response.
If you want to use the db session normally in a function, just get the db instance using db = SessionLocal(), and proceed to use the db instance in the function.
Example:
def do_something():
    db = SessionLocal()
    event = db.query(models.Events).first()
    db.delete(event)
    db.commit()
    db.close()
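If you still want the automatic cleanup that the FastAPI dependency gives you, one option (a sketch, not part of the original answer) is to wrap the same generator in contextlib.contextmanager, reusing the SessionLocal and models objects from the question:

from contextlib import contextmanager

@contextmanager
def get_db_session():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()  # runs even if the body of the with-block raises

def do_something():
    with get_db_session() as db:
        event = db.query(models.Events).first()
        db.delete(event)
        db.commit()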

How to keep Snowflake connection alive in python

I have stored my database on the Snowflake server, and using https://pypi.org/project/snowflake-connector-python/ I am accessing the database and running queries from a Flask server as below:
ctx = snowflake.connector.connect(
    user='username',
    password='pass',
    account='account',
    client_session_keep_alive=True
)
cs = ctx.cursor()
try:
    cs.execute("SELECT current_version()")
    one_row = cs.fetchone()
    print("Successfully connected to snowflake version: {}".format(one_row[0]))
    cs.close()
except Exception as e:
    print("Snowflake connection error: {}".format(e))
    cs.close()
I have defined the Snowflake connection session as a global ctx variable so it is accessible from any Flask API function.
After starting the Flask server everything works fine, but if no API calls are made for a few continuous hours, it throws the error 'snowflake.connector.errors.ProgrammingError: 390114 (08001): Authentication token has expired. The user must authenticate again.'
As you can see, I have set the client_session_keep_alive=True parameter in the Snowflake connect API to keep the session alive, but this still somehow fails.
I looked into this issue but did not find any conclusive information.
So I want to know how I can keep the database connection session alive, or whether I have to create a new connection session for each query.
Any suggestions will be very helpful.
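One pattern that is sometimes used here (just a sketch, not an official Snowflake recommendation) is to validate the global connection before each use and rebuild it when it has expired; the credentials below are placeholders from the question:

import snowflake.connector
from snowflake.connector.errors import ProgrammingError

ctx = None

def get_ctx(force_new=False):
    # Return a usable connection, reconnecting if it is missing, closed, or forced.
    global ctx
    if force_new or ctx is None or ctx.is_closed():
        ctx = snowflake.connector.connect(
            user='username',
            password='pass',
            account='account',
            client_session_keep_alive=True,
        )
    return ctx

def run_query(sql):
    # Execute a query, retrying once on a fresh connection if the token has expired.
    for attempt in range(2):
        cs = get_ctx(force_new=(attempt == 1)).cursor()
        try:
            return cs.execute(sql).fetchall()
        except ProgrammingError:
            if attempt == 1:
                raise
        finally:
            cs.close()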

cx_oracle persistent connection on flask+apache+mod_wsgi

I have deployed my Flask application on Apache + mod_wsgi.
I'm using WSGI daemon mode and have this config in the Apache httpd.conf:
WSGIDaemonProcess flask_test user=apache group=apache threads=20
For simplicity, let's say that for each request I need to execute a query to insert data into an Oracle database.
So in my Flask application I have done something like this:
# DB.py
import cx_Oracle

class DB:
    def __init__(self, connection_string):
        self.conn = cx_Oracle.connect(connection_string, threaded=True)

    def insert(self, query):
        cur = self.conn.cursor()
        cur.execute(query)
        cur.close()
        self.conn.commit()
# flask_app.py
from flask import Flask, request, jsonify
from DB import DB

app = Flask(__name__)
db = DB(connection_string)

@app.route("/foo", methods=["POST"])
def foo():
    post_data = request.get_json()
    # parse above data
    # create insert query with parsed data values
    db.insert(insert_processed_data_QUERY)
    # generate response
    return jsonify(response)
When I start the apache+mod_wsgi server, the DB object is created and the DB connection is established.
For all incoming requests, the same DB object is used to execute insert query.
So far this works fine for me. However my concern is that if there are no requests for a long period of time, the DB connection might time out, and then my app will not work for a new request when it comes.
I've been monitoring my application and have observed that the DB connection persists for hours and hours, but I'm pretty sure it might time out if there is no request for 2-3 days(?).
What would be the correct way to ensure that the DB connection will stay open forever? (i.e. as long as the apache server is running)
Use a pool instead of a standalone connection. When you acquire a connection from the pool it will check to see if the connection is no longer valid and automatically dispense a new one. So you need something like this:
pool = cx_Oracle.SessionPool(user=user, password=password, dsn=dsn, min=1,
                             max=2, increment=1)
Then in your code you need to do the following:
with pool.acquire() as connection:
    # do what you need to do with the connection
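Folding the pool into the DB class from the question might look roughly like the sketch below (same method names as the question; the params argument is an addition for illustration):

# DB.py, reworked around a session pool
import cx_Oracle

class DB:
    def __init__(self, user, password, dsn):
        self.pool = cx_Oracle.SessionPool(user=user, password=password, dsn=dsn,
                                          min=1, max=2, increment=1, threaded=True)

    def insert(self, query, params=None):
        # acquire() hands back a healthy connection, replacing dead ones transparently
        with self.pool.acquire() as conn:
            cur = conn.cursor()
            cur.execute(query, params or [])
            cur.close()
            conn.commit()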

Is it required to close python SQL connection in Flask app?

I have code like the snippet below. Is it necessary to close the MySQL connection, given that a new SQL connection is created every time my home page is requested?
I randomly get a connection-limit error, but I'm not sure whether the DB connections are the problem.

@app.route("Home", methods=["GET"])
def get_home_page():
    db = mysql.connect(host, user, password, db_name, charset='utf8', use_unicode=True)
    ...
It is good practice to close the connection. You can put your code inside a try...finally block.

@app.route("Home", methods=["GET"])
def get_home_page():
    db = mysql.connect(host, user, password, db_name, charset='utf8', use_unicode=True)
    try:
        ...  # do something
    finally:
        db.close()
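An alternative (a sketch, not from the answer above) is Flask's teardown pattern, so the per-request connection is always closed even if the view raises; mysql here stands for the same driver used in the question:

from flask import Flask, g

app = Flask(__name__)

def get_db():
    # Open one connection per request and cache it on g.
    if 'db' not in g:
        g.db = mysql.connect(host, user, password, db_name,
                             charset='utf8', use_unicode=True)
    return g.db

@app.teardown_appcontext
def close_db(exc):
    # Runs after every request, whether or not an exception occurred.
    db = g.pop('db', None)
    if db is not None:
        db.close()

@app.route("Home", methods=["GET"])
def get_home_page():
    db = get_db()
    # ... do something ...
    return "ok"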
In my experience with Flask, closing the session after use makes a significant difference in API response time:

from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

try:
    db.session.query(SomeModel)  # any model ...
except Exception:
    pass
finally:
    db.session.close()

SQLAlchemy and (2006, 'MySQL server has gone away')

So, after reading every page around for the last hour, I'm still not able to find a solution to this problem.
This is my connection.py file:
from sqlalchemy import create_engine, Table, Column, String, Integer, MetaData
from sqlalchemy.sql import select

class DatabaseConnectionManager:
    def __init__(self):
        self.host = 'localhost:3306'
        self.db = 'xxx'
        self.user = 'xxx'
        self.pwd = 'xxx'
        self.connect_string = 'mysql://{u}:{p}@{s}/{d}'.format(u=self.user,
                                                               p=self.pwd,
                                                               s=self.host,
                                                               d=self.db)
        self.metadata = MetaData()
        self.engine = create_engine(self.connect_string, echo=False,
                                    pool_size=100, pool_recycle=3600)
        self.conn = self.engine.connect()

    def insert_table(self, inputs):
        self.conn.execute(self.tbl_auctions().insert(), inputs)
        # Update 1: conn.close() removed.
        # self.conn.close()

    def select_servers(self):
        try:
            s = select([self.tbl_servers()])
            result = self.conn.execute(s)
        except:
            raise
        else:
            return result
And this is my bulk_inserter.py file:
import sys
import time
import json

from connector import DatabaseConnectionManager

def build_auctions_list():
    server_list = []
    db = DatabaseConnectionManager()
    # Loop over regions
    for server, env in db.select_servers_table():
        request_auction_data = json.loads(dump_auction_url(region, realm))
        for auction in request_auction_data:
            auction_list.append(auction)
        db.insert_table(server_list)

if __name__ == '__main__':
    start = time.time()
    build_auctions_list()
    print time.time() - start
So, the problem happens when I try to insert all the bulk data using db.insert_table(server_list) for 2 or more servers returned by the loop for server, env in db.select_servers_table():
But if that loop returns only one server, the flow runs normally without problems.
To summarize:
This program retrieves a list of servers from a DB table and dumps the JSON data into the DB.
The bulk insert performs well if only one server is retrieved.
If two or more servers are retrieved, the following error happens:
sqlalchemy.exc.OperationalError: (OperationalError) (2006, 'MySQL server has gone away')
Does anyone have any idea what could be happening? I already increased the timeout and buffer size in the MySQL config file, so I'm not sure what the problem could be...
Update #1: It seems I can't bulk insert an array with more than 50k values. I'm still trying to figure out how to do it.
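If the oversized bulk insert is what trips MySQL's max_allowed_packet limit, one common workaround (a sketch, not from the question) is to split the insert into smaller chunks; the chunk size below is arbitrary and should be tuned to your row size:

# Chunk the bulk insert so each statement stays well under max_allowed_packet.
CHUNK_SIZE = 5000

def insert_in_chunks(db, rows, chunk_size=CHUNK_SIZE):
    for start in range(0, len(rows), chunk_size):
        db.insert_table(rows[start:start + chunk_size])

# usage inside build_auctions_list():
#     insert_in_chunks(db, server_list)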
