Flask SQLAlchemy does not close MySQL database connections - python

I have a Flask app using Flask-SQLAlchemy with a MySQL database where the db is defined as the following:
db.py:
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
main.py:
from db import db
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = "mysql+pymysql://" + \
DB_USERNAME + ":" + DB_PASSWORD + "@" + DB_HOST + "/" + DB_DATABASE
db.init_app(app)
@app.teardown_appcontext
def teardown_db(error):
db.session.close()
db.engine.dispose()
user.py:
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True, nullable=False)
email = db.Column(db.String(120), unique=True, nullable=False)
I query my database using models using either db.engine.execute() to write raw SQL queries where required or use the integrated Flask-SQLAlchemy APIs for reading data such as User.query.filter_by().all().
I write new data into the db using the following:
new_user_entry = User(username = "abc", email = "abc@example.com")
db.session.add(new_user_entry)
db.session.commit()
I am monitoring my MySQL server using show processlist and I notice that the database connections keep increasing by 2 for every single request that comes my way. The database connections seem to reset only when I stop the Flask process. With time, the MySQL server throws the below error:
`sqlalchemy.exc.TimeoutError: QueuePool limit of size 10 overflow 10 reached, connection timed out, timeout 30 (Background on this error at: http://sqlalche.me/e/3o7r)`
I am serving the app using gunicorn and gevent/eventlet with 2 worker processes. I use python3.
Am I missing something here? I tried ending the db session and disposing the engine, but this does not seem to work.

I finally found a fix to the above problem.
I used the declarative model defined in here instead of following the quickstart documentation for Flask-SQLAlchemy given here.
The changed files are as follows:
db.py:
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
engine = create_engine(DB_URI, convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
import user
Base.metadata.create_all(bind=engine)
main.py:
from db import init_db, db_session
init_db()
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
user.py:
from sqlalchemy import Column, Integer, String
from data_models.db import Base
class User(Base):
id = db.Column(Integer, primary_key=True)
username = db.Column(String(80), unique=True, nullable=False)
email = db.Column(String(120), unique=True, nullable=False)
To query for records we could either use User.query.filter_by().all() or db_engine.execute().
To write new data into the database, we can use the following:
new_user_entry = User(username = "abc", email = "abc@example.com")
db_session.add(new_user_entry)
db_session.commit()

In case we need to close the session before creating a new child process (which is recommended), this is what we should use:
db.session.remove()
db.engine.dispose()
Like
from multiprocessing import Process
from app import db
@app.route('/process/start/', methods = ["GET"])
def process_start():
db.session.remove()
db.engine.dispose()
p = Process(target = long_task)
p.start()
return 'started the long task'
def long_task():
'''
do long task
'''

Use with statement, there is a test:
def test():
with db.session() as dbss:
qss = models.WhateverModel.query.session
assert dbss == qss

Related

db.create_all() not generating db

I'm trying to test Flask with SQLAlchemy and I stumbled across this problem. First, I have to note that I read all of the related threads and none of them solves my problem. The problem is that db.create_all() doesn't generate the table I defined. I have a model class in the file person.py:
from website import db
class Person(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String, nullable=False)
password = db.Column(db.String)
width = db.Column(db.Integer)
height = db.Column(db.Integer)
agent = db.Column(db.String)
user_data_dir = db.Column(db.String)
And in my website.py which is the file from where I launch the app:
from flask import Flask, jsonify, render_template, request
from flask_sqlalchemy import SQLAlchemy
# create the extension
db = SQLAlchemy()
def start_server(host, port, debug=False):
from person import Person
# create the app
app = Flask(__name__,
static_url_path='',
static_folder='web/static',
template_folder='web/templates')
# configure the SQLite database, relative to the app instance folder
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///database0.db"
# initialize the app with the extension
db.init_app(app)
print('initialized db')
print('creating tables...')
with app.app_context():
db.create_all()
db.session.add(Person(username="example33"))
db.session.commit()
person = db.session.execute(db.select(Person)).scalar()
print('persons')
print(person.username)
if __name__ == '__main__':
start_server(host='0.0.0.0', port=5002, debug=True)
I think the problem might be that the Person class is not importing properly, because when I put the class inside the start_server function it executes fine and creates the table, but I don't know why this is happening. I followed all the advice and imported it before everything, and also I share the same db object between the 2 files
There is probably a better way to do this but this is the only way I could get this to work. You need to create a models.py file or w.e you wanna call it. Then all your database stuff goes in there. The db engine, ALL your models and a function to initialize it all. The reason is, you are having import issues where Person is imported but not fully and so the db doesn't have it in its metadata.
models.py
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Person(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String, nullable=False)
password = db.Column(db.String)
width = db.Column(db.Integer)
height = db.Column(db.Integer)
agent = db.Column(db.String)
user_data_dir = db.Column(db.String)
# All other models
def initialize_db(app: Flask):
db.init_app(app)
with app.app_context():
db.create_all()
main.py
from flask import Flask
import models
def start_server(host, port, debug=False):
app = Flask(__name__)
# configure the SQLite database, relative to the app instance folder
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///database0.db"
# initialize the app with the extension
models.initialize_db(app)
db = models.db
with app.app_context():
db.session.add(models.Person(username="example33"))
db.session.commit()
person = db.session.execute(db.select(models.Person)).scalar()
print('persons')
print(person.username)
if __name__ == '__main__':
start_server(host='0.0.0.0', port=5002, debug=True)
I am reading the documentation,
which explains that the function will
Create all tables stored in this metadata.
That leads me to believe Person is not associated with the db metadata.
You mentioned
when I put the class inside the start_server function it ... creates the table
Your from person import Person is nice enough,
but I suspect we wanted a simple import person.
In many apps the idiom would be import models.
Failing that, you may be able to point
create_all in the right direction
with this optional parameter:
tables – Optional list of Table objects, which is a subset of the total tables in the MetaData
Please let us know
what technical approach worked for you.

SQLAlchemy doesn't correctly create in-memory database

Making an API using FastAPI and SQLAlchemy, I'm experiencing strange behaviour when the database (SQLite) is in-memory which doesn't occur when it is stored as a file.
Model:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
Base = declarative_base()
class Thing(Base):
__tablename__ = "thing"
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String)
I create two global engine objects. One with database as file, the other as in-memory database:
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
args = dict(echo=True, connect_args={"check_same_thread": False})
engine1 = create_engine("sqlite:///db.sqlite", **args)
engine2 = create_engine("sqlite:///:memory:", **args)
Session1 = sessionmaker(bind=engine1)
Session2 = sessionmaker(bind=engine2)
I create my FastAPI app and a path to add an object to database:
from fastapi import FastAPI
app = FastAPI()
#app.get("/")
def foo(x: int):
with {1: Session1, 2: Session2}[x]() as session:
session.add(Thing(name="foo"))
session.commit()
My main to simulate requests and check everything is working:
from fastapi.testclient import TestClient
if __name__ == "__main__":
Base.metadata.create_all(engine1)
Base.metadata.create_all(engine2)
client = TestClient(app)
assert client.get("/1").status_code == 200
assert client.get("/2").status_code == 200
thing table is created in engine1 and committed, same with engine2. On first request "foo" was successfully inserted into engine1's database (stored as file) but second request raises "sqlite3.OperationalError" claiming "no such table: thing".
Why is there different behaviour between the two? Why does in-memory database claim the table doesn't exist even though SQLAlchemy logs show create table statement ran successfully and was committed?
The docs explain this in the following https://docs.sqlalchemy.org/en/14/dialects/sqlite.html#using-a-memory-database-in-multiple-threads
To use a :memory: database in a multithreaded scenario, the same connection object must be shared among threads, since the database exists only within the scope of that connection. The StaticPool implementation will maintain a single connection globally, and the check_same_thread flag can be passed to Pysqlite as False
It also shows how to get the intended behavior, so in your case
from sqlalchemy.pool import StaticPool
args = dict(echo=True, connect_args={"check_same_thread": False}, poolclass=StaticPool)

SQL Alchemy - Base.metadata.create_all(bind=engine) not creating tables in test db

I am using FastAPI to build an API backend for my URL shortener. I have the database connected to the API as a dependency. For the CRUD operations, I am using the SQL Alchemy ORM.
The code for my main app works perfectly fine and performs all the major CRUD operations I have mapped through the API endpoints.
The problem arises when I try to override the DB dependency to use a test db instead of my production db for testing purposes.
There are no errors associated with this override; however, the test database does not contain any of the tables that would be created when Base.metadata.create_all(bind=engine) is called.
When running the tests using pytest, it gives me this error:
sqlalchemy.exc.ProgrammingError: (pymysql.err.ProgrammingError) (1146, "Table 'testurldb.urls' doesn't exist")
The code for my tests:
engine = create_engine(
"mysql+pymysql://{user}:{password}#{ip}:{port}/testurldb".format(
user=user, password=password, ip=ip, port=port
)
)
Session = sessionmaker(bind=engine)
Base.metadata.create_all(bind=engine)
def overrideDB():
db = Session()
try:
yield db
finally:
db.close()
app.dependency_overrides[get_db] = overrideDB
client = TestClient(app)
The module where Base is instantiated:
engine = create_engine(
"mysql+pymysql://{root}:{password}#{ip}:{port}/urldb".format(
root=root, password=password, ip=ip, port=port
)
)
SessionLocal = sessionmaker(bind=engine)
Base = declarative_base()
The table that extends Base:
class URL(Base):
__tablename__ = "urls"
short_url = Column(String(256), primary_key=True, unique=True, nullable=False)
long_url = Column(String(256), nullable=False, unique=True)
time = Column(String(256), nullable=False)
def __init__(self, short_url, long_url, time):
self.short_url = short_url
self.long_url = long_url
self.time = time
There seems to be nothing wrong with the imports, so I don't understand why it's not creating the tables.
And second, it might be useful information that my main production db already has the Tables created.
Create a file and import all models into it, at the end of all imports, put the Model import. When doing this, try to create the models again through Base.metadata
I hope this helps you.
db.py
Base = declarative_base()
async def init_db():
try:
Base.metadata.create_all(bind=engine)
except Exception as e:
raise e
main.py
#app.on_event("startup")
async def on_startup():
await init_db()

Query an existing mysql table in database using flask-sqlalchemy

I have a Flask app, using Flask-SQLAlchemy to query a MySQL database
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://root:password@localhost/abc'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
there is a table "users" in "abc" database and it is already populated with several hundred rows.
Now I need to import this existing table, rather than first defining it with db.Model.
How do I call the table?
If I do this
from sqlalchemy import Table
USERS = Table('users', db.metadata,autoload=True, autoload_with=db.engine)
then I am not able to make a query like
USERS.query.filter_by(done=1).with_entities(USERS.name,USERS.country).paginate(page, 15, False)
it generates an error
AttributeError: 'Table' object has no attribute 'query'
because this is a SQLAlchemy construct, not a Flask-SQLAlchemy one; I don't fully understand this.
I have to first define the table USERS as if I am creating it for the first time:
class USERS(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.VARCHAR(500))
country = db.Column(db.VARCHAR(50))
def __init__(self, id, name, country):
self.id = id
self.name = name
self.country = country
def __repr__(self):
return self.id
Only then am I able to use USERS to query the database through Flask-SQLAlchemy.
How do I access an existing table "users" using Flask-SQLAlchemy in a Flask app?
In SQLAlchemy you should query tables with a session if you want to query a Table(), because a 'Table' object has no attribute 'query'. And you do not need to create the table if it already exists — just use it. sqlalchemy existing database query
from sqlalchemy import Table, Column, String, create_engine, MetaData
from sqlalchemy.orm import sessionmaker
engine = create_engine()
metadata = MetaData()
test_ = Table('test', metadata,
Column('msg', String, primary_key=True),
Column('msg_', String)
)
Session = sessionmaker(bind=engine)
session = Session()
print(session.query(test_).filter_by(msg_ = "test").with_entities("msg","msg_").one())
# ('t', 'test')
In flask_sqlalchemy, it almost same as sqlalchemy did.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = ""
db = SQLAlchemy(app)
class test(db.Model):
msg = db.Column(db.String, primary_key=True)
msg_ = db.Column(db.String)
def __init__(self, msg, msg_):
self.msg = msg
self.msg_ = msg_
def __repr__(self):
return "msg: {} msg_: {}".format(self.msg,self.msg_)
result = test.query.filter_by(msg_="test").one()
print(result)
print(result.msg,result.msg_)
'''
msg: t msg_: test
t test
'''

Pyramid Framework including models.py from addon to main application

I am trying to create a pyramid framework authentication addon/Plugin. The Plugin needs to have a database that stores user logins and other data, so if a user uses my addon his database must contain certain tables/models from the addon. e.g this Users table
class User(Base):
__tablename__ = 'User'
id = Column(Integer, primary_key=True)
username = Column(Text())
user_firstname = Column(Text())
user_lastname = Column(Text())
user_email = Column(Text())
user_password = Column(Text())
user_registrationdate = Column(DateTime())
user_email_key = Column(Text())
user_email_key_date_created = Column(DateTime())
user_email_approved = Column(Boolean())
user_email_sent = Column(Boolean())
user_active_account = Column(Boolean())
user_banned = Column(Boolean())
user_banned_reason = Column(Text())
I need this model to be included in the users main app, i am guessing i have to include something in my includeme shown below
def includeme(config):
config.include('pyramid_mako')
config.add_route('pyramid.admin', '/pyramid/admin')
#static views
config.add_static_view('assets', 'pyramidadmin:static/assets/')
config.scan('pyramidadmin.views')
but i have no Idea what to do. Is there a way i can merge the Base and DBSession from my addon to the base in the main application so that if a user runs ../bin/initialize_myapp_db the tables from myaddon and from the main app are all created?
I've had that same problem myself. I'm still working on trying to make my stuff more pluggable, but with mixed results. The way I tackled this problem was to bind each declarative base class with the same engine. In my generic scaffolding I do this:
# create db engine
engine = engine_from_config(settings, 'sqlalchemy.')
# setup db.sessionmaker
settings['db.sessionmaker'] = DBSession
# bind session to engine
DBSession.configure(bind=engine)
# bind objects to engine
Base.metadata.bind = engine
from trumpet.models.base import Base as TrumpetBase
TrumpetBase.metadata.bind = engine
if settings.get('db.populate', 'False') == 'True':
from mslemon.models.main import make_test_data
import mslemon.models.misslemon
Base.metadata.create_all(engine)
TrumpetBase.metadata.create_all(engine)
#initialize_sql(engine)
You can use the following method:
import sqlalchemy
import sqlalchemy.orm as orm
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.ext.declarative import declarative_base
DBSession = None
def get_sa_base(engine):
sabase = sqlalchemy.ext.declarative.declarative_base()
sabase.metadata.reflect(engine)
return sabase
def includeme(config):
global DBSession
engine = sqlalchemy.engine_from_config(config.registry.settings)
if DBSession is None:
DBSession = orm.scoped_session(
orm.sessionmaker(extension=ZopeTransactionExtension()))
DBSession.remove()
DBSession.configure(bind=engine)
Base = get_sa_base(engine)
# example:
Base.metadata.tables.values()
An example can be found here.

Categories