I've been building a Flask app using Flask-SQLAlchemy and Flask-Migrate.
Recently I decided to replace the extensions with plain SQLAlchemy and Alembic, and I started wondering where the best place is to store the SQLAlchemy db session object.
Right now I have the following:
Base = declarative_base()

def init_db_session(app, expire_on_commit=True):
    """
    Initialize the database
    """
    engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI'], convert_unicode=True)
    db_session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, expire_on_commit=expire_on_commit, bind=engine)
    )
    Base.query = db_session.query_property()
    return db_session

def init_app(app):
    """
    Flask app initialization and bootstrap
    """
    init_logging(app)
    app.celery = init_celery(app)
    app.db_session = init_db_session(app)
Given some docs and examples online, though, I'm wondering whether using Flask's global g would be any better.
Both current_app and g belong to the same (application) context. I've read about that in the docs and in the code, but I still can't get my head around the practical differences and the potential drawbacks of keeping the session on current_app compared to g.
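For reference, the g-based alternative I have in mind would look roughly like this (just a sketch; get_db_session and teardown_db_session are placeholder names, and current_app.db_session is the scoped session set up in init_app above):
from flask import current_app, g

def get_db_session():
    """Fetch the session via g instead of reaching for current_app everywhere."""
    if 'db_session' not in g:
        g.db_session = current_app.db_session   # the scoped_session created in init_db_session()
    return g.db_session

def teardown_db_session(exception=None):
    session = g.pop('db_session', None)
    if session is not None:
        session.remove()   # return the connection to the pool

# registered in init_app() with: app.teardown_appcontext(teardown_db_session)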
The Flask documentation recommends declaring the session at module scope. This is also how I use it in my own code.
Base = declarative_base()

# there is no app object at import time, so the URL comes from your settings
# module or environment rather than from app.config
engine = create_engine(SQLALCHEMY_DATABASE_URI, convert_unicode=True)
db_session = scoped_session(
    sessionmaker(
        autocommit=False,
        autoflush=False,
        expire_on_commit=True,
        bind=engine
    )
)
Base.query = db_session.query_property()

def init_db():
    """ not much to do here if migrations are handled elsewhere. """
    pass

def init_app(app):
    """
    Flask app initialization and bootstrap
    """
    init_logging(app)
    app.celery = init_celery(app)
    app.db_session = db_session   # the module-level session declared above
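The one piece worth adding from the Flask docs pattern is the teardown: views import db_session from this module directly, and a teardown handler returns the scoped session to its registry after each request. A minimal sketch, assuming this module is importable as db:
from db import db_session            # module name assumed for this sketch

def init_app(app):
    ...
    @app.teardown_appcontext
    def shutdown_session(exception=None):
        db_session.remove()          # hand the session back to the scoped_session registry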
Related
If I have an __init__.py file that looks like this:
from sqlalchemy import create_engine
import os
from sqlalchemy.orm import sessionmaker, scoped_session
from testserver.database.models.Base import Base
from sqlalchemy.ext.declarative import declarative_base
from flask_migrate import Migrate

BASE_DIR = os.path.dirname(os.path.realpath(__file__))

def set_db_connection():
    connection_string = 'sqlite:///' + os.path.join(BASE_DIR, 'testserver.db')
    return connection_string

engine = create_engine(set_db_connection(), connect_args={'check_same_thread': False})
Session = sessionmaker(autocommit=False, autoflush=False, bind=engine)

# Set the custom base class
Base = declarative_base(cls=Base)

def init_app(app):
    Base.metadata.create_all(engine)
    Base.metadata.bind = engine
    Session.configure(bind=engine)
    app.session = scoped_session(Session)

# Export session for use in other classes
db_session = Session()
If I make an update to my existing models, can I use the flask_migrate package to successfully run a migration? And if so, how?
Author of Flask-Migrate here. Flask-Migrate needs Flask-SQLAlchemy or Alchemical (a still somewhat experimental package I built that uses the newer query functionality in SQLAlchemy 1.4).
If you use plain SQLAlchemy, then work with Alembic directly.
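In practice that usually means something like the following (a rough sketch; myapp.db is an assumed module path, adjust it to wherever your declarative Base lives):
# one-time setup: create the migration environment
#   $ alembic init migrations
#
# in alembic.ini, set sqlalchemy.url to your database URL (or override it in env.py)
#
# in migrations/env.py, point Alembic at your metadata so --autogenerate can diff your models:
from myapp.db import Base
target_metadata = Base.metadata

# day-to-day usage from the command line:
#   $ alembic revision --autogenerate -m "add users table"
#   $ alembic upgrade head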
I have a simple function in a FastAPI app that gets called by a cron job, something like this (note that this is not a GET or POST FastAPI endpoint, just a plain function):
def read_user(user_id: int, db: Session = Depends(get_db)):
    db_user = crud.get_user(db, user_id=user_id)
where get_db is:
from database import SessionLocal

def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
And database.py is:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

SQLALCHEMY_DATABASE_URL = "://url-to-db"

engine = create_engine(
    SQLALCHEMY_DATABASE_URL
)

SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()
Since the crud method takes db (the SessionLocal object yielded by get_db) as a parameter, how do I use it with dependency injection in my plain function, the way it is used in GET or POST handlers like the read_user method above?
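The closest I have come so far is to wrap the same generator logic in contextlib.contextmanager and open the session myself in the cron entry point (get_db_context and read_user_job are names I made up for this sketch):
from contextlib import contextmanager

import crud
from database import SessionLocal

@contextmanager
def get_db_context():
    # same lifecycle as get_db, but usable from plain code with a `with` statement
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

def read_user_job(user_id: int):
    with get_db_context() as db:
        return crud.get_user(db, user_id=user_id)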
I want to write some py.test code to test two simple SQLAlchemy ORM classes that were created based on this tutorial. The problem is: how do I point py.test at a test database and roll back all changes when the tests are done? Is it possible to mock the database and run the tests without actually connecting to the database?
Here is the code for my classes:
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import sessionmaker, relationship

eng = create_engine('mssql+pymssql://user:pass@host/my_database')

Base = declarative_base(eng)
Session = sessionmaker(eng)
intern_session = Session()

class Author(Base):
    __tablename__ = "Authors"
    AuthorId = Column(Integer, primary_key=True)
    Name = Column(String)
    Books = relationship("Book")

    def add_book(self, title):
        b = Book(Title=title, AuthorId=self.AuthorId)
        intern_session.add(b)
        intern_session.commit()

class Book(Base):
    __tablename__ = "Books"
    BookId = Column(Integer, primary_key=True)
    Title = Column(String)
    AuthorId = Column(Integer, ForeignKey("Authors.AuthorId"))
    Author = relationship("Author")
I usually do it this way:
I do not instantiate the engine and session alongside the model declarations; instead I only declare a Base with no bind:
Base = declarative_base()
and I only create a session when needed:
engine = create_engine('<the db url>')
Session = sessionmaker(bind=engine)
db_session = Session()
You can do the same by not using intern_session in your add_book method, and instead accepting a session parameter:
def add_book(self, session, title):
    b = Book(Title=title, AuthorId=self.AuthorId)
    session.add(b)
    session.commit()
This makes your code more testable, since you can pass the session of your choice when you call the method, and you are no longer stuck with a session bound to a hardcoded database URL.
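For example, the caller (or a test) can build whatever session it wants and hand it to the method. A quick sketch against a throwaway SQLite database (the models import path is an assumption):
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from models import Base, Author           # assumed import path for the Author/Book classes above

engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)          # create the Authors/Books tables in the throwaway db
Session = sessionmaker(bind=engine)

session = Session()
author = Author(Name="Jane Doe")
session.add(author)
session.commit()                          # assigns author.AuthorId
author.add_book(session, "A Test Book")   # the session is now explicit rather than hardcoded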
I add a custom --dburl option to pytest using its pytest_addoption hook.
Simply add this to your top-level conftest.py:
def pytest_addoption(parser):
    parser.addoption('--dburl',
                     action='store',
                     default='<if needed, whatever you want>',
                     help='url of the database to use for tests')
Now you can run pytest --dburl <url of the test database>
Then I can retrieve the dburl option from the request fixture
From a custom fixture:
@pytest.fixture()
def db_url(request):
    return request.config.getoption("--dburl")
# ...
Inside a test:
def test_something(request):
    db_url = request.config.getoption("--dburl")
    # ...
At this point you are able to:
get the test db_url in any test or fixture
use it to create an engine
create a session bound to the engine
pass the session to a tested method
It is quite a mess to do this in every test, so you can make good use of pytest fixtures to ease the process.
Below are some fixtures I use:
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker

@pytest.fixture(scope='session')
def db_engine(request):
    """yields a SQLAlchemy engine which is disposed of after the test session"""
    db_url = request.config.getoption("--dburl")
    engine_ = create_engine(db_url, echo=True)

    yield engine_

    engine_.dispose()

@pytest.fixture(scope='session')
def db_session_factory(db_engine):
    """returns a SQLAlchemy scoped session factory"""
    return scoped_session(sessionmaker(bind=db_engine))

@pytest.fixture(scope='function')
def db_session(db_session_factory):
    """yields a SQLAlchemy session which is rolled back after the test"""
    session_ = db_session_factory()

    yield session_

    session_.rollback()
    session_.close()
Using the db_session fixture, you get a fresh, clean session for every single test.
When the test ends, the session is rolled back, keeping the database clean.
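A test that needs the database then just declares the fixture as an argument (assuming the tables already exist in the test database, and with an assumed import path for the question's models):
from models import Author, Book   # assumed import path

def test_author_book_relationship(db_session):
    author = Author(Name="Jane Doe")
    db_session.add(author)
    db_session.flush()                             # assigns author.AuthorId without committing
    db_session.add(Book(Title="A Test Book", AuthorId=author.AuthorId))
    assert db_session.query(Book).count() == 1     # rolled back by the fixture afterwards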
I have a Flask app using Flask-SQLAlchemy with a MySQL database, where the db is defined as follows:
db.py:
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
main.py:
from flask import Flask
from db import db

app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = "mysql+pymysql://" + \
    DB_USERNAME + ":" + DB_PASSWORD + "@" + DB_HOST + "/" + DB_DATABASE
db.init_app(app)

@app.teardown_appcontext
def teardown_db(error):
    db.session.close()
    db.engine.dispose()
user.py:
class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True, nullable=False)
    email = db.Column(db.String(120), unique=True, nullable=False)
I query the database either with db.engine.execute() for raw SQL where required, or with the integrated Flask-SQLAlchemy APIs for reading data, such as User.query.filter_by().all().
I write new data into the db using the following:
new_user_entry = User(username="abc", email="abc@example.com")
db.session.add(new_user_entry)
db.session.commit()
I am monitoring my MySQL server using show processlist and I notice that the database connections keep increasing by 2 for every single request that comes my way. The database connections seem to reset only when I stop the Flask process. With time, the MySQL server throws the below error:
`sqlalchemy.exc.TimeoutError: QueuePool limit of size 10 overflow 10 reached, connection timed out, timeout 30 (Background on this error at: http://sqlalche.me/e/3o7r)`
I am serving the app using gunicorn and gevent/eventlet with 2 worker processes. I use python3.
Am I missing something here? I tried ending the db session and disposing the engine, but this does not seem to work.
I finally found a fix to the above problem.
I used the plain SQLAlchemy declarative pattern (the one described in the Flask documentation) instead of following the Flask-SQLAlchemy quickstart documentation.
The changed files are as follows:
db.py:
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine(DB_URI, convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()

def init_db():
    import user
    Base.metadata.create_all(bind=engine)
main.py:
from db import init_db, db_session

init_db()

@app.teardown_appcontext
def shutdown_session(exception=None):
    db_session.remove()
user.py:
from sqlalchemy import Column, Integer, String
from data_models.db import Base

class User(Base):
    __tablename__ = 'users'   # a table name is required for declarative models (name assumed here)
    id = Column(Integer, primary_key=True)
    username = Column(String(80), unique=True, nullable=False)
    email = Column(String(120), unique=True, nullable=False)
To query for records we can either use User.query.filter_by().all() or engine.execute().
To write new data into the database, we can use the following:
new_user_entry = User(username="abc", email="abc@example.com")
db_session.add(new_user_entry)
db_session.commit()
In case we need to close the session before creating a new child process (which is recommended), this is what we should use:
db.session.remove()
db.engine.dispose()
For example:
from multiprocessing import Process
from app import app, db

@app.route('/process/start/', methods=["GET"])
def process_start():
    db.session.remove()
    db.engine.dispose()
    p = Process(target=long_task)
    p.start()
    return 'started the long task'

def long_task():
    '''
    do long task
    '''
Use a with statement; here is a test:
def test():
    with db.session() as dbss:
        qss = models.WhateverModel.query.session
        assert dbss == qss
This is how I set up my database for an application (in Flask):
from sqlalchemy.engine import create_engine
from sqlalchemy.orm import scoped_session, create_session
from sqlalchemy.ext.declarative import declarative_base

engine = None
db_session = scoped_session(lambda: create_session(bind=engine,
                                                   autoflush=False,
                                                   autocommit=False,
                                                   expire_on_commit=True))
Base = declarative_base()
Base.query = db_session.query_property()

def init_engine(uri, **kwargs):
    global engine
    engine = create_engine(uri, **kwargs)
    Base.metadata.create_all(bind=engine)
    return engine
If I connect to a file database that has had tables created already, everything works fine, but using sqlite:///:memory: as a target database gives me:
OperationalError: (OperationalError) no such table: users u'DELETE FROM users' ()
when querying like this, for example:
UsersTable.query.delete()
db_session.commit()
I am accessing this code from a unit test. What is the problem?
Thanks
Edit:
Working setup of the application:
app = Flask(__name__)
app.config.from_object(__name__)
app.secret_key = 'XXX'
# presenters
from presenters.users import users
# register modules (presenters)
app.register_module(users)
# initialize the database
init_engine(db)
The code you posted doesn't contain any table/class declaration. Are you sure that the declaration is done before init_engine() is called?
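For example, importing the model modules before calling init_engine() is enough for create_all() to pick the tables up (database and models are assumed module names here):
from database import init_engine      # the module shown above: engine, db_session, Base, init_engine

import models                         # importing this defines UsersTable (and friends) on Base

init_engine('sqlite:///:memory:')     # Base.metadata now contains the users table, so create_all() creates it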