Load an ML model once in Celery and FastAPI - python

I'm running my FastAPI app first:
python myfast.py
and then:
celery -A myfast.celery worker
Inside my app I have a class where I load my models, called create_models. I need to call this function once and share the loaded models between both instances (the FastAPI app and the Celery worker) to avoid loading the same models, which take many GB, into GPU memory twice. How can I achieve this?
The initial python file (myfast.py) is:
from fastapi import FastAPI
from models import create_models
from myrouter import myrouter
import uvicorn
from celery_object.app import celery_app

API_PORT = 8839

app = FastAPI()
app.celery_app = celery_app
celery = app.celery_app
app.include_router(myrouter)

if __name__ == "__main__":
    print("Starting API server on port {}".format(API_PORT))
    uvicorn.run("myfast:app", host='localhost', port=API_PORT, reload=False, access_log=False)
The file for loading the model (models.py) is:
class IA_models():
    def __init__(self):
        super().__init__()
        self.model = None

    def __call__(self, *args, **kwargs):
        if self.model is None:
            self.model = True
            # Do stuff to load models
The file for the endpoints (myrouter.py):
from fastapi import APIRouter
from models import IA_models

internal = APIRouter(tags=['internal'], responses={404: {"description": "Not found"}})

model = IA_models()
model()

# endpoints
Then I have other code for Celery, but the Celery tasks are always called from the endpoints, and the model is passed to the task. Some endpoints use the model directly; others pass the model to a Celery task, and the task then uses it.
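One way to picture the setup (a hedged sketch, not the original code): keep a single module-level IA_models instance in models.py and import that same object from both the router and the Celery task module. Each process then loads the models at most once, on first use; note that the FastAPI server and each Celery worker are separate processes, so each still holds its own copy in GPU memory unless a single process serves the model for both. The lock and the get_model helper below are assumptions added for illustration:
models.py (sketch)
import threading

class IA_models:
    def __init__(self):
        self.model = None
        self._lock = threading.Lock()

    def __call__(self, *args, **kwargs):
        # Load at most once per process, even with concurrent callers
        with self._lock:
            if self.model is None:
                self.model = True
                # Do stuff to load models
        return self.model

# Single shared instance for this process; both myrouter.py and the
# Celery tasks module import this object instead of creating their own.
ia_models = IA_models()

def get_model():
    return ia_models()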

Related

How to preserve Flask app context across Celery and SQLAlchemy

I'm trying to learn Flask by building a proof-of-concept app that takes a JSON payload and uses SQLAlchemy to write it to a DB. I'm using Celery to manage the write tasks.
The app is structured
|-app.py
|-project
  |-__init__.py
  |-celery_utils.py
  |-config.py
  |-users
    |-__init__.py
    |-models.py
    |-tasks.py
app.py builds the flask app and celery instance.
app.py
from project import create_app, ext_celery

app = create_app()
celery = ext_celery.celery

@app.route("/")
def alive():
    return "alive"
/project/__init__.py is the application factory for the flask app. It instantiates the extensions, links everything together, and registers the blueprints.
/project/__init__.py
import os

from flask import Flask
from flask_celeryext import FlaskCeleryExt
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

from project.celery_utils import make_celery
from project.config import config

# instantiate extensions
db = SQLAlchemy()
migrate = Migrate()
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)

def create_app(config_name=None):
    if config_name is None:
        config_name = os.environ.get("FLASK_CONFIG", "development")

    # instantiate the app
    app = Flask(__name__)

    # set config
    app.config.from_object(config[config_name])

    # set up extensions
    db.init_app(app)
    migrate.init_app(app, db)
    ext_celery.init_app(app)

    # register blueprints
    from project.users import users_blueprint
    app.register_blueprint(users_blueprint)

    # shell context for flask cli
    @app.shell_context_processor
    def ctx():
        return {"app": app, "db": db}

    return app
/project/celery_utils.py manages the creation of the celery instances
/project/celery_utils.py
from celery import current_app as current_celery_app

def make_celery(app):
    celery = current_celery_app
    celery.config_from_object(app.config, namespace="CELERY")
    return celery
In the users dir, I'm trying to manage the creation of a basic user with celery task management.
/project/users/__init__.py is where I create the blueprints and routes.
/project/users/__init__.py
from flask import Blueprint, request, jsonify
from celery.result import AsyncResult

from .tasks import divide, post_to_db

users_blueprint = Blueprint("users", __name__, url_prefix="/users", template_folder="templates")

from . import models, tasks

@users_blueprint.route('/users', methods=['POST'])
def users():
    request_data = request.get_json()
    task = post_to_db.delay(request_data)
    response = {
        "id": task.task_id,
        "status": task.status,
    }
    return jsonify(response)

@users_blueprint.route('/responses', methods=['GET'])
def responses():
    request_data = request.get_json()
    result = AsyncResult(id=request_data['id'])
    response = result.get()
    return jsonify(response)
/project/users/models.py is a simple User model - it does, however, stay within the Flask app context correctly if a user is created from the Flask CLI.
/project/users/models.py
from project import db

class User(db.Model):
    """model for the user object"""
    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String(128), unique=True, nullable=False)
    email = db.Column(db.String(128), unique=True, nullable=False)

    def __init__(self, username, email, *args, **kwargs):
        self.username = username
        self.email = email
Finally, /project/users/tasks.py is where I handle the celery tasks for this dir.
/project/users/tasks.py
from celery import shared_task

from .models import User
from project import db

@shared_task()
def post_to_db(payload):
    print("made it here")
    user = User(**payload)
    db.session.add(user)
    db.session.commit()
    db.session.close()
    return True
The modules work, but as soon as I wire it all up and hit the endpoint with a JSON payload, I get the error message:
RuntimeError: No application found. Either work inside a view function or push an application context. ...
I have tried to preserve the app context in tasks.py by:
...
from project import db, ext_celery

@ext_celery.shared_task()
def post_to_db(payload):
    ...

and:

...
from project import db, ext_celery

@ext_celery.task()
def post_to_db(payload):
    ...
These error with: TypeError: exceptions must derive from BaseException
I've tried pushing the app context
...
from project import db
from app import app

@shared_task()
def post_to_db(payload):
    with app.app_context():
        ...
This also errors with: TypeError: exceptions must derive from BaseException
I've tried importing celery from the app itself
...
from project import db
from app import celery

@celery.task()
def post_to_db(payload):
    ...
This also errors with: TypeError: exceptions must derive from BaseException
Any suggestions gratefully received. There's a final piece of the puzzle I'm missing, and it's very frustrating.
With thanks to snakecharmerb
I had to add ContextTask to the make_celery() function in /project/celery_utils.py
from celery import current_app as current_celery_app

def make_celery(app):
    celery = current_celery_app
    celery.config_from_object(app.config, namespace="CELERY")

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    celery.Task = ContextTask

    return celery
And then a few tweaks in /project/users/tasks.py
from celery import shared_task

from .models import User
from project import db

@shared_task()
def post_to_db(payload):
    user = User(**payload)
    db.session.add(user)
    db.session.commit()
    db.session.close()
    return True
Now I can see the user in the database, and my message queue is progressing as expected.

How to separate umongo's object document models from the main web app, or how to avoid calling @instance.register where document models are defined?

While creating a web app using Flask or FastAPI, there is usually a main.py file that instantiates and runs everything. I think that is also the right place for the database connection and initialization. So ideally I'd like a separate model.py file that just contains the object-document mapping definitions and nothing else.
Is it possible to do something like that in umongo?
The problem is that we need to call @instance.register above every object-document mapping class. But if those classes live in a separate file where the DB is not initialized, there is no instance available in that file; the instance would be declared in the main.py file.
For example, when you use Tortoise, it allows you to pass the whole model.py file as a module and register it with FastAPI like the following -
register_tortoise(
    app,
    db_url=os.environ.get("DATABASE_URL"),
    modules={"models": ["models.model"]},  # model -> model.py which has all the class definitions
    generate_schemas=True
)
For demo, you can have the following as model.py file contents -
from umongo import Document
from umongo.fields import StringField, URLField, DateTimeField
from datetime import datetime

class WebData(Document):
    url = URLField()
    summary = StringField()
    created_at = DateTimeField(missing=datetime.now)

    def __str__(self):
        return self.url
And the following main.py file that uses FastAPI -
from fastapi import FastAPI
from pymongo import MongoClient
from umongo import Instance

db = MongoClient().test
instance = Instance(db)

app = FastAPI()

@app.get('/ping')
async def pong():
    return {'ping': 'pong'}

if __name__ == "__main__":
    import uvicorn
    uvicorn.run("app.main:app", host="0.0.0.0", port=8000, reload=True)
You can instantiate the instance at import but pass it a DB connection at app init.
common.py
from umongo.frameworks import PyMongoInstance
instance = PyMongoInstance()
model.py
from umongo import Document

from .common import instance

@instance.register
class MyDocument(Document):
    ...
__init__.py
from pymongo import MongoClient

from .common import instance
from . import model

def create_app():
    database = MongoClient().test
    instance.init(database)
    ...
(Note: in umongo 3 beta, instance.init is renamed to instance.set_db.)
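Since the question was about FastAPI, a rough sketch of the same idea wired into a FastAPI main.py might look like the following (this assumes umongo 3, so set_db is used; the startup hook and names here are illustrative, not from the answer above):
main.py (sketch)
from fastapi import FastAPI
from pymongo import MongoClient

from common import instance   # the not-yet-initialized PyMongoInstance from common.py
import model                  # importing runs @instance.register on the document classes

app = FastAPI()

@app.on_event("startup")
def init_db():
    db = MongoClient().test
    instance.set_db(db)       # attach the real DB connection at app init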

How do I get the application context in a Blueprint, but not in a request?

I am attempting to convert a collection of Flask apps to a single app with several Blueprints.
In one of my apps, I have a task that runs periodically in the background, not related to a request. It looks something like this:
import apscheduler.schedulers.background
import flask

app = flask.Flask(__name__)
app.config['DATABASE']

scheduler = apscheduler.schedulers.background.BackgroundScheduler()
scheduler.start()

def db():
    _db = flask.g.get('_db')
    if _db is None:
        _db = get_db_connection_somehow(app.config['DATABASE'])
        flask.g._db = _db
    return _db

@scheduler.scheduled_job('interval', hours=1)
def do_a_thing():
    with app.app_context():
        db().do_a_thing()
When I convert this app to a Blueprint, I lose access to the app object and I can't figure out how to create an application context when I need one. This is what I tried:
import apscheduler.schedulers.background
import flask

bp = flask.Blueprint('my_blueprint', __name__)

scheduler = apscheduler.schedulers.background.BackgroundScheduler()
scheduler.start()

def db():
    _db = flask.g.get('_db')
    if _db is None:
        _db = get_db_connection_somehow(flask.current_app.config['DATABASE'])
        flask.g._db = _db
    return _db

@bp.record
def record(state):
    with state.app.app_context():
        flask.g._app = state.app

@scheduler.scheduled_job('interval', hours=1)
def do_a_thing():
    with flask.g._app.app_context():
        db().do_a_thing()
The error I get is:
RuntimeError: Working outside of application context.
So, how can I get the application context in a blueprint but outside a request?
I solved this problem with the following changes. First, I set up a scheduler object on my Flask app:
app = flask.Flask(__name__)
app.scheduler = apscheduler.schedulers.background.BackgroundScheduler()
app.scheduler.start()
Next, I changed the function that runs my background task to accept the app as an argument, so I could read the database connection information from app.config:
def do_a_thing(app: flask.Flask):
    db = get_db_connection_somehow(app.config['DATABASE'])
    db.do_a_thing()
Finally, I set up the scheduled job in Blueprint.record():
@bp.record
def record(state):
    state.app.scheduler.add_job(do_a_thing, trigger='interval', args=[state.app], hours=1)
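Putting those pieces together, the blueprint module might end up looking roughly like this (get_db_connection_somehow remains the placeholder from the question; the app and its scheduler are created in the app factory as shown above):
import flask

bp = flask.Blueprint('my_blueprint', __name__)

def do_a_thing(app: flask.Flask):
    db = get_db_connection_somehow(app.config['DATABASE'])
    db.do_a_thing()

@bp.record
def record(state):
    # state.app is the application this blueprint was registered on
    state.app.scheduler.add_job(do_a_thing, trigger='interval', args=[state.app], hours=1)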

In a Flask App, where should Celery be instantiated?

I have a Flask app, which is a very basic app with a POST handler and some DB insertions. The DB insertions are set up as Celery tasks. If I put the Celery instance creation and the task definitions in a tasks.py file and call the functions from my main.py file (which also creates the Flask app), I get an out-of-context error. The tasks in tasks.py in turn call a DB class that does the DB insertions. How do I properly create the Celery instance and make sure it has the Flask context?
This is roughly how the project is structured:
main.py = Flask app creation, routes handling and tasks.delay calls.
tasks.py = Celery instance creation and task definitions.
DB = Inserts.
I want everything to work in the same context.
The Flask docs suggest subclassing Celery's Task class and wrapping task execution in a Flask app context. So in tasks.py, if your Flask app instance is named app and your Celery instance is named celery, you would replace celery's Task attribute with the new subclass:
TaskBase = celery.Task

class ContextTask(TaskBase):
    abstract = True

    def __call__(self, *args, **kwargs):
        with app.app_context():
            return TaskBase.__call__(self, *args, **kwargs)

celery.Task = ContextTask
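With celery.Task replaced this way, any task defined afterwards runs inside the app context automatically, so the DB insertions from the question work without extra boilerplate. A minimal sketch (the DBWriter class and module names here are placeholders, not from the question):
from app import celery        # assumes the Flask app module exposes the patched celery instance
from db import DBWriter       # placeholder for the question's DB class

@celery.task
def insert_row(payload):
    # runs under app.app_context() thanks to ContextTask
    DBWriter().insert(payload)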

How to use Flask-SQLAlchemy in a Celery task

I recently switched to Celery 3.0. Before that I was using Flask-Celery to integrate Celery with Flask. Although it had many issues, like hiding some powerful Celery functionality, it allowed me to use the full context of the Flask app, and especially Flask-SQLAlchemy.
In my background tasks I am processing data and using the SQLAlchemy ORM to store it. The maintainer of Flask-Celery has dropped support for the plugin. The plugin pickled the Flask instance into the task so I could have full access to SQLAlchemy.
I am trying to replicate this behavior in my tasks.py file, but with no success. Do you have any hints on how to achieve this?
Update: We've since started using a better way to handle application teardown and set up on a per-task basis, based on the pattern described in the more recent flask documentation.
extensions.py
import flask
from flask_sqlalchemy import SQLAlchemy
from celery import Celery

class FlaskCelery(Celery):

    def __init__(self, *args, **kwargs):
        super(FlaskCelery, self).__init__(*args, **kwargs)
        self.patch_task()

        if 'app' in kwargs:
            self.init_app(kwargs['app'])

    def patch_task(self):
        TaskBase = self.Task
        _celery = self

        class ContextTask(TaskBase):
            abstract = True

            def __call__(self, *args, **kwargs):
                if flask.has_app_context():
                    return TaskBase.__call__(self, *args, **kwargs)
                else:
                    with _celery.app.app_context():
                        return TaskBase.__call__(self, *args, **kwargs)

        self.Task = ContextTask

    def init_app(self, app):
        self.app = app
        self.config_from_object(app.config)

celery = FlaskCelery()
db = SQLAlchemy()
app.py
from flask import Flask
from extensions import celery, db

def create_app():
    app = Flask(__name__)

    # configure/initialize all your extensions
    db.init_app(app)
    celery.init_app(app)

    return app
Once you've set up your app this way, you can run and use celery without having to explicitly run it from within an application context, as all your tasks will automatically be run in an application context if necessary, and you don't have to explicitly worry about post-task teardown, which is an important issue to manage (see other responses below).
Troubleshooting
If you keep getting AttributeError: 'FlaskCelery' object has no attribute 'app' from the with _celery.app.app_context(): line, make sure to:
Keep the celery import at the app.py file level. Avoid:
app.py
from flask import Flask

def create_app():
    app = Flask(__name__)
    initialize_extensions(app)
    return app

def initialize_extensions(app):
    from extensions import celery, db  # DOOMED! Keep celery import at the FILE level
    db.init_app(app)
    celery.init_app(app)
Start your celery workers BEFORE you flask run, and use
celery worker -A app:celery -l info -f celery.log
Note the app:celery, i.e. loading from app.py.
You can still import from extensions to decorate tasks, i.e. from extensions import celery.
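For reference, a small sketch of what a task module can look like under this setup (the do_work task and its query are illustrative, not from the answer); no explicit app_context() is needed because ContextTask pushes one when the task runs outside an application context:
tasks.py (sketch)
from sqlalchemy import text

from extensions import celery, db

@celery.task
def do_work():
    # db.session is usable directly; ContextTask provides the app context
    db.session.execute(text("SELECT 1"))
    return True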
Old answer below, still works, but not as clean a solution
I prefer to run all of celery within the application context by creating a separate file that invokes celery.start() with the application's context. This means your tasks file doesn't have to be littered with context setup and teardowns. It also lends itself well to the flask 'application factory' pattern.
extensions.py
from flask_sqlalchemy import SQLAlchemy
from celery import Celery

db = SQLAlchemy()
celery = Celery()
tasks.py
from extensions import celery, db
from flask.globals import current_app
from celery.signals import task_postrun

@celery.task
def do_some_stuff():
    current_app.logger.info("I have the application context")
    # you can now use the db object from extensions

@task_postrun.connect
def close_session(*args, **kwargs):
    # Flask-SQLAlchemy will automatically create new sessions for you from
    # a scoped session factory. Given that we are maintaining the same app
    # context, this ensures tasks have a fresh session (e.g. session errors
    # won't propagate across tasks)
    db.session.remove()
app.py
from flask import Flask

from extensions import celery, db

def create_app():
    app = Flask(__name__)

    # configure/initialize all your extensions
    db.init_app(app)
    celery.config_from_object(app.config)

    return app
RunCelery.py
from app import create_app
from extensions import celery

app = create_app()

if __name__ == '__main__':
    with app.app_context():
        celery.start()
In your tasks.py file do the following:
from celery import Celery
from main import create_app

app = create_app()

celery = Celery(__name__)
celery.add_defaults(lambda: app.config)

@celery.task
def create_facet(project_id, **kwargs):
    with app.test_request_context():
        # your code
        ...
I used Paul Gibbs' answer with two differences. Instead of task_postrun I used worker_process_init. And instead of .remove() I used db.session.expire_all().
I'm not 100% sure, but from what I understand the way this works is when Celery creates a worker process, all inherited/shared db sessions will be expired, and SQLAlchemy will create new sessions on demand unique to that worker process.
So far it seems to have fixed my problem. With Paul's solution, when one worker finished and removed the session, another worker using the same session was still running its query, so db.session.remove() closed the connection while it was being used, giving me a "Lost connection to MySQL server during query" exception.
Thanks Paul for steering me in the right direction!
Never mind, that didn't work. I ended up adding an argument to my Flask app factory so that it does not run db.init_app(app) if Celery is calling it. Instead, the workers call it after Celery forks them. I now see several connections in my MySQL processlist.
from extensions import db
from celery.signals import worker_process_init
from flask import current_app

@worker_process_init.connect
def celery_worker_init_db(**_):
    db.init_app(current_app)
from flask import Flask
from werkzeug.utils import import_string
from celery.signals import worker_process_init, celeryd_init
from flask_celery import Celery
from src.app import config_from_env, create_app

celery = Celery()

def get_celery_conf():
    config = import_string('src.settings')
    config = {k: getattr(config, k) for k in dir(config) if k.isupper()}
    config['BROKER_URL'] = config['CELERY_BROKER_URL']
    return config

@celeryd_init.connect
def init_celeryd(conf=None, **kwargs):
    conf.update(get_celery_conf())

@worker_process_init.connect
def init_celery_flask_app(**kwargs):
    app = create_app()
    app.app_context().push()
Update the celery config at celeryd init.
Use your flask app factory to initialize all flask extensions, including the SQLAlchemy extension.
By doing this, we are able to maintain a database connection per worker.
If you want to run your task under flask context, you can subclass Task.__call__:
class SmartTask(Task):
    abstract = True

    def __call__(self, *_args, **_kwargs):
        with self.app.flask_app.app_context():
            with self.app.flask_app.test_request_context():
                result = super(SmartTask, self).__call__(*_args, **_kwargs)
        return result

class SmartCelery(Celery):

    def init_app(self, app):
        super(SmartCelery, self).init_app(app)
        self.Task = SmartTask
