I am trying to write tests for a web service, and I want a separate database to be created when the tests run. These are my pytest fixtures for that:
@pytest.fixture(scope="session")
def db_engine():
    engine = create_engine(SQLALCHEMY_DATABASE_URL)
    if not database_exists(engine.url):
        create_database(engine.url)
    Base.metadata.create_all(bind=engine)
    yield engine
@pytest.fixture(scope="function")
def db(db_engine):
    connection = db_engine.connect()
    connection.begin()
    db = Session(bind=connection)
    yield db
    db.rollback()
    connection.close()
@pytest.fixture(scope="function")
def client(db):
    app.dependency_overrides[get_db] = lambda: db
    with TestClient(app) as c:
        yield c
But app.dependency_overrides[get_db] = lambda: db didn't work: requests keep going to the main database instead of the test one.
One of my endpoints:
@router.get("/", response_model=List[RoomPayload])
def read(db: Session = Depends(get_db),
         user=Depends(manager)):
    q = db.query(Room).all()
    if not q:
        raise HTTPException(status_code=404, detail="Rooms not found")
    return q
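One thing worth checking: app.dependency_overrides is keyed by the exact function object, so the get_db you override must be the very same object the router module imports for Depends(get_db). A minimal sketch of a more explicit override (the module paths app.database and app.main are assumptions, not from the question):
from app.database import get_db   # hypothetical path: must be the SAME object the routers use
from app.main import app          # hypothetical path

@pytest.fixture(scope="function")
def client(db):
    def override_get_db():
        yield db                   # hand the per-test session to every request

    app.dependency_overrides[get_db] = override_get_db
    with TestClient(app) as c:
        yield c
    app.dependency_overrides.clear()   # don't leak the override into other tests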
In FastAPI I had the following function that I used to open and close a DB session:
def get_db():
    try:
        db = SessionLocal()
        yield db
    finally:
        db.close()
And within the routes of my API I would do something like this:
@router.get("/")
async def read_all_events(user: dict = Depends(get_current_user), db: Session = Depends(get_db)):
    logger.info("API read_all_events")
    if user is None:
        raise http_user_credentials_not_valid_exception()
    return db.query(models.Events).all()
You can see that I am injecting the session into the API call.
Now I want to do something similar within a plain Python function:
def do_something():
    # get person data from database
    # play with person data
    # save new person data in database
    # get cars data from database
So I am wondering if I should use the same approach as in FastAPI (I do not know how), or if I should just open and close the connection manually, like this:
def do_something():
    try:
        db = SessionLocal()
        yield db
        # get person data from database
        # play with person data
        # save new person data in database
        # get cars data from database
    finally:
        db.close()
Thanks
The usage of yield here is so that Depends(get_db) hands the db session instance to the FastAPI route, and as soon as the route returns its response, the finally clause (db.close()) is executed. This is good because every request uses a separate db session, and connections are closed after every response.
If you want to use the db session normally in a function, just get the db instance using db = SessionLocal(), and proceed to use the db instance in the function.
Example:
def do_something():
    db = SessionLocal()
    event = db.query(models.Events).first()
    db.delete(event)
    db.commit()
    db.close()
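If you also want the open/close handling to be automatic outside of a route, one option is to wrap the same pattern in a context manager. A minimal sketch, assuming the SessionLocal and models from above:
from contextlib import contextmanager

@contextmanager
def db_session():
    # Same open/close pattern as get_db, but usable with `with` in plain functions.
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()

def do_something():
    with db_session() as db:
        event = db.query(models.Events).first()
        db.delete(event)
        db.commit()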
Currently using SQLAlchemy and FastAPI for a production microservice hosted in AWS. The issue is that our production database secrets are rotated every 30 days. I am trying to automatically fetch the new secrets from Secrets Manager and reinitialize the database engine and session when SQLAlchemy raises an error or OperationalError.
My question is where should this "reinitialization" occur?
utils/secret_mgr.py
import json
import logging

import boto3
from botocore.exceptions import ClientError

def get_secret(secret_id):
    session = boto3.client('secretsmanager', region_name='us-east-1')
    try:
        response = session.get_secret_value(SecretId=secret_id)
    except ClientError as e:
        code = e.response['Error']['Code']
        logging.exception(f'error:get_secret error_code:{code}')
        raise e
    else:
        secret_str = response['SecretString']
        secret = json.loads(secret_str)
        return secret
utils/db.py
import logging
import os

from sqlalchemy.pool import QueuePool
from sqlalchemy.sql import text
from sqlmodel import SQLModel, Session, create_engine
from sqlalchemy.exc import OperationalError

from api.utils.secret_mgr import get_secret

engine = None
SECRET_NAME = os.environ.get('DB_SECRET_NAME')
SQLALCHEMY_DATABASE_URL = 'postgresql+psycopg2://{username}:{password}@{host}:{port}/{dbname}'
def get_database_uri():
    secret = get_secret(SECRET_NAME)
    return SQLALCHEMY_DATABASE_URL.format(
        username=secret['username'],
        password=secret['password'],
        host=secret['host'],
        port=secret['port'],
        dbname=secret['dbname'],
    )
def get_engine():
    global engine
    if engine:
        return engine
    conn_str = get_database_uri()
    engine = create_engine(
        conn_str,
        echo=True,
        poolclass=QueuePool,
        pool_pre_ping=True,
        # pool_size=15,
        # max_overflow=5,
        echo_pool="debug"
    )
    return engine

engine = get_engine()
# class SessionManager:
#     def __init__(self):
#         self.db = sessionmaker(bind=engine, autocommit=True, expire_on_commit=False)
#
#     def __enter__(self):
#         return self.db
#
#     def __exit__(self, exc_type, exc_val, exc_tb):
#         self.db.close()
def get_session():
    with Session(engine) as session:
        try:
            yield session
            session.commit()
        except Exception as exc:
            session.rollback()
            raise exc
        finally:
            session.close()
def init_db_sqlalchemy():
    SQLModel.metadata.create_all(engine)
def fetch(db: Session, query, *args, **kwargs):
    try:
        stmt = text(query)
        result = db.execute(stmt, *args, **kwargs)
        db.commit()
        return result
    except (Exception, OperationalError) as err:
        logging.exception(f"error_code={err} function_name={fetch.__name__}")
    finally:
        db.close()
import logging
import os
import time
from uuid import uuid4

import uvicorn
from fastapi import FastAPI, Request, Depends
from fastapi.encoders import jsonable_encoder
from sqlmodel import Session

from api.routes.info import info, health
from api.utils.db import init_db_sqlalchemy, get_session, fetch

logger = logging.getLogger(__name__)

app = FastAPI()
app.include_router(info, prefix="/info")
app.include_router(health, prefix="/health")
@app.middleware("http")
async def add_logging_and_process_time(request: Request, call_next):
    start_time = time.time()
    request_id = str(uuid4().hex)
    response = await call_next(request)
    process_time = time.time() - start_time
    process_time = str(round(process_time * 1000))
    response.headers["X-Process-Time-MS"] = process_time
    log_msg = f"request_id={request_id} service=my-svc url={request.url} host={request.client.host} " \
              f"port={request.client.port} processing_time_ms={process_time} env={os.environ.get('APP_ENV')} " \
              f"version=v1 pid={os.getpid()} region={os.environ.get('REGION')} "
    logger.info(log_msg)
    return response
@app.on_event('startup')
def startup():
    init_db_sqlalchemy()
@app.get('/getDatabaseInfo')
def get_db_data_example(db: Session = Depends(get_session)):
    try:
        records = fetch(db, DATABASE_QUERY).all()
        return jsonable_encoder(records)
    except Exception as err:
        logger.exception(f"function_name=getDatabaseInfo error={err}")

if __name__ == '__main__':
    uvicorn.run(app, host="0.0.0.0", port=8050)
I currently initialize the database on startup. In the event of a database connection error, where should we reinitialize the database? i.e. pull the new credentials from utils/secret_mgr.py and recreate the database engine in utils/db.py.
Given the above information a few questions:
Should engine be a global object if we need to reinit every 30 days?
If get_session (the dependency injected into the GET request) fails, it will close the session and return the connection to the pool. If one of the pooled connections becomes invalidated, that in turn invalidates all connections in the pool; this is expected behavior. But where should we recreate the database engine?
What is the proper way to do this given the constraints above?
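For what it's worth, one possible shape for this (a sketch under the assumption that stale credentials surface as an OperationalError; reset_engine and fetch_with_retry are hypothetical helper names, not part of the code above) is to dispose the engine, clear the cached global, and let the existing lazy get_engine() rebuild it from Secrets Manager on the next call:
def reset_engine():
    # Hypothetical helper: drop the cached engine so get_engine() rebuilds it
    # from freshly fetched secrets on the next call.
    global engine
    if engine is not None:
        engine.dispose()   # closes pooled connections tied to the old credentials
    engine = None

def fetch_with_retry(query, *args, **kwargs):
    # Hypothetical wrapper: retry once after refreshing the engine.
    for attempt in range(2):
        try:
            with Session(get_engine()) as session:
                return session.execute(text(query), *args, **kwargs).all()
        except OperationalError:
            if attempt == 1:
                raise
            logging.warning("OperationalError; refreshing engine from Secrets Manager")
            reset_engine()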
I have an issue when I test my API: the DB session in my tests isn't up to date after performing a POST request.
For example:
@pytest.fixture(scope="module")
def db(engine, tables, users) -> Generator:
    """Returns an sqlalchemy session, and after the test tears down everything properly."""
    try:
        db = TestingSessionLocal()
        yield db
    finally:
        db.close()
def test_test(client: TestClient, db: Session):
    item_id = 1
    params = {
        "item_id": item_id,
    }
    r = client.post("/endpoint", json=params)
    db2: Session = TestingSessionLocal()  # TEMP SOLUTION
    # item_from_db = service.table.get(db, item_id)  # TODO: make it work!!
    item_from_db = service.event.get(db2, item_id)
    db2.close()
I can see the new item in the DB at run time, but I can't get it with the fixture session. Only when I create a new connection to the DB do I manage to get the new item I added.
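A hedged guess based on the code shown: the module-scoped session is serving stale identity-map state (and possibly sitting in a transaction opened before the POST committed). Expiring the session's cached objects, and if necessary ending its current transaction, may let the original fixture see the new row:
def test_test(client: TestClient, db: Session):
    r = client.post("/endpoint", json={"item_id": 1})

    db.expire_all()   # drop cached object state so the next query re-reads the DB
    # If the session began a transaction before the POST was committed,
    # ending that transaction may also be needed:
    # db.rollback()
    item_from_db = service.event.get(db, 1)
    assert item_from_db is not None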
If I want to use the database while processing a request, I do dependency injection like this:
@app.post("/sample_test")
async def sample_test(db: Session = Depends(get_db)):
    return db.query(models.User.height).all()
But I cannot do it with events like this:
@app.on_event("startup")
async def sample_test(db: Session = Depends(get_db)):
    return db.query(models.User.height).all()
because Starlette events don't support Depends.
This is my get_db() function:
def get_db():
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
just like in FastAPI manual (https://fastapi.tiangolo.com/tutorial/sql-databases/).
How can I access get_db() inside my event function, so I can work with a Session?
I've tried:
@app.on_event("startup")
async def sample_test(db: Session = Depends(get_db)):
    db = next(get_db())
    return db.query(models.User.height).all()
but it doesn't work.
I use MSSQL, if it's important.
Instead of using a dependency, you can import the SessionLocal you've created (as shown in the FastAPI manual) and use a context manager to open and close this session:
@app.on_event("startup")
async def sample_test():
    with SessionLocal() as db:
        return db.query(models.User.height).all()
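Note that using the Session as a context manager requires SQLAlchemy 1.4 or later. On older versions, a sketch of the same idea is to drive the existing get_db() generator by hand:
@app.on_event("startup")
async def sample_test():
    db_gen = get_db()
    db = next(db_gen)          # pull the session out of the generator
    try:
        return db.query(models.User.height).all()
    finally:
        db_gen.close()         # runs get_db()'s finally block, closing the session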
I'm trying to write unit tests for a module within a Flask app that uses its own database connection.
The module opens its connection thus:
engine = create_engine(SQLALCHEMY_DATABASE_URI)
Session = sessionmaker(bind=engine)
session = Session()
and I then use session throughout the module.
My unit test has a fixture in conftest.py to create a new session:
@pytest.yield_fixture(scope='module')
def test_session(app):
    """
    Creates a new database session for a test. Note you must use this fixture
    if your test connects to db.
    Here we not only support commit calls but also rollback calls in tests,
    :coolguy:.
    """
    connection = db.engine.connect()
    transaction = connection.begin()
    options = dict(bind=connection, binds={})
    db_session = db.create_scoped_session(options=options)
    db_session.begin_nested()

    # session is actually a scoped_session
    # for the `after_transaction_end` event, we need a session instance to
    # listen for, hence the `session()` call
    @sqlalchemy.event.listens_for(db_session(), 'after_transaction_end')
    def restart_savepoint(sess, trans):
        if trans.nested and not trans._parent.nested:
            db_session.expire_all()
            db_session.begin_nested()

    db.session = db_session
    yield db_session

    db_session.remove()
    transaction.rollback()
    connection.close()
and in my test I do this:
def test_schedule_orders_by_last_update(test_session, create_test_user):
    vendor = test_session.query(Vendors).filter(Vendors.name == 'Melie Bianco').first()
    amazon = AmazonRequests(vendor)
    amazon.schedule_orders_by_last_update()

    result = test_session.query(AmazonReportRequests).filter(AmazonReportRequests.vendor == vendor).all()
    assert len(result) == 1
    assert result[0].vendor.name == vendor.name
My problem is that when I run the test it always ends with the following error:
self = <sqlalchemy.orm.session.Session object at 0x1104fab50>, state = <sqlalchemy.orm.state.InstanceState object at 0x110863f10>, obj = <AmazonReportRequests None>
def _before_attach(self, state, obj):
if state.session_id == self.hash_key:
return False
if state.session_id and state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
"(this is '%s')" % (state_str(state),
> state.session_id, self.hash_key))
E InvalidRequestError: Object '<AmazonReportRequests at 0x110863e90>' is already attached to session '2' (this is '1')
Shouldn't a query just retrieve the row from the database and ignore the other session?