Using Celery with flask_restful - Python

I have a simple Flask-RESTful API where I want to execute requests as Celery tasks, since some endpoints need a lot of execution time.
main.py:
from flask import Flask
from flask_restful import Api
from flask_celery import make_celery
from resource import someResource  # the Resource defined in resource.py

app = Flask(__name__)
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'

celery = make_celery(app)
api = Api(app)
api.add_resource(someResource, '/someendpoint/')

if __name__ == '__main__':
    app.run(debug=True)
with make_celery.py:
from celery import Celery

def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_RESULT_BACKEND'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    celery.Task = ContextTask
    return celery
I now want to make the Resource I defined in resource.py run as a Celery task:
class Cost(Resource):
    def get(self):
        result = some_code
        return result
What is the most convenient way to make the get method a Celery task here?
Thanks a lot!
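
One common pattern (a minimal sketch of my own, not from the question; the task name long_running_work and the second resource are assumptions) is to keep the Resource thin: define the slow work as a Celery task, have get() enqueue it with .delay(), and return the task id so the client can poll for the result:
# resource.py -- minimal sketch; binds to the Celery app created by make_celery(app)
from celery import shared_task
from celery.result import AsyncResult
from flask_restful import Resource

@shared_task
def long_running_work():
    # the expensive computation goes here
    return 42

class Cost(Resource):
    def get(self):
        # enqueue the work instead of running it inline; respond immediately
        task = long_running_work.delay()
        return {'task_id': task.id}, 202

class CostResult(Resource):
    def get(self, task_id):
        # let the client poll for the outcome
        result = AsyncResult(task_id)
        if not result.ready():
            return {'state': result.state}, 202
        return {'result': result.get()}
The polling resource would be registered with something like api.add_resource(CostResult, '/someendpoint/<string:task_id>').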

Related

How to preserve Flask app context across Celery and SQLAlchemy

I'm trying to learn Flask by building a proof-of-concept app that takes a JSON payload and uses SQLAlchemy to write it to a DB. I'm using Celery to manage the write tasks.
The app is structured as follows:
|-app.py
|-project
  |-__init__.py
  |-celery_utils.py
  |-config.py
  |-users
    |-__init__.py
    |-models.py
    |-tasks.py
app.py builds the flask app and celery instance.
app.py
from project import create_app, ext_celery

app = create_app()
celery = ext_celery.celery

@app.route("/")
def alive():
    return "alive"
/project/__init__.py is the application factory for the flask app. It instantiates the extensions, links everything together, and registers the blueprints.
/project/__init__.py
import os

from flask import Flask
from flask_celeryext import FlaskCeleryExt
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

from project.celery_utils import make_celery
from project.config import config

# instantiate extensions
db = SQLAlchemy()
migrate = Migrate()
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)

def create_app(config_name=None):
    if config_name is None:
        config_name = os.environ.get("FLASK_CONFIG", "development")

    # instantiate the app
    app = Flask(__name__)

    # set config
    app.config.from_object(config[config_name])

    # set up extensions
    db.init_app(app)
    migrate.init_app(app, db)
    ext_celery.init_app(app)

    # register blueprints
    from project.users import users_blueprint
    app.register_blueprint(users_blueprint)

    # shell context for flask cli
    @app.shell_context_processor
    def ctx():
        return {"app": app, "db": db}

    return app
/project/celery_utils.py manages the creation of the celery instances
/project/celery_utils.py
from celery import current_app as current_celery_app

def make_celery(app):
    celery = current_celery_app
    celery.config_from_object(app.config, namespace="CELERY")
    return celery
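For context (background I'm adding, not part of the original post): with namespace="CELERY", Celery only reads Flask config keys prefixed with CELERY_ and maps each one to the matching lowercase setting. A config class along these lines would be picked up (the class and key names here are illustrative):
# project/config.py -- illustrative sketch of the keys the namespace expects
class DevelopmentConfig:
    CELERY_BROKER_URL = "redis://localhost:6379/0"      # becomes broker_url
    CELERY_RESULT_BACKEND = "redis://localhost:6379/0"  # becomes result_backend

config = {"development": DevelopmentConfig}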
In the users dir, I'm trying to manage the creation of a basic user with celery task management.
/project/users/__init__.py is where I create the blueprints and routes.
/project/users/__init__.py
from celery.result import AsyncResult
from flask import Blueprint, request, jsonify

from .tasks import divide, post_to_db

users_blueprint = Blueprint("users", __name__, url_prefix="/users", template_folder="templates")

from . import models, tasks

@users_blueprint.route('/users', methods=['POST'])
def users():
    request_data = request.get_json()
    task = post_to_db.delay(request_data)
    response = {
        "id": task.task_id,
        "status": task.status,
    }
    return jsonify(response)

@users_blueprint.route('/responses', methods=['GET'])
def responses():
    request_data = request.get_json()
    result = AsyncResult(id=request_data['id'])
    response = result.get()
    return jsonify(response)
/project/users/models.py is a simple User model. It does, however, manage to work within the Flask app context when a user is created from the Flask CLI.
/project/users/models.py
from project import db

class User(db.Model):
    """model for the user object"""
    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String(128), unique=True, nullable=False)
    email = db.Column(db.String(128), unique=True, nullable=False)

    def __init__(self, username, email, *args, **kwargs):
        self.username = username
        self.email = email
Finally, /project/users/tasks.py is where I handle the celery tasks for this dir.
/project/users/tasks.py
from celery import shared_task

from .models import User
from project import db

@shared_task()
def post_to_db(payload):
    print("made it here")
    user = User(**payload)
    db.session.add(user)
    db.session.commit()
    db.session.close()
    return True
The modules work, but as soon as I wire it all up and hit the endpoint with a JSON payload, I get the error message:
RuntimeError: No application found. Either work inside a view function or push an application context. ...
I have tried to preserve the app context in tasks.py by:
...
from project import db, ext_celery

@ext_celery.shared_task()
def post_to_db(payload):
    ...
and by:
...
from project import db, ext_celery

@ext_celery.task()
def post_to_db(payload):
    ...
These error with: TypeError: exceptions must derive from BaseException
I've tried pushing the app context:
...
from project import db
from app import app

@shared_task()
def post_to_db(payload):
    with app.app_context():
        ...
This also errors with: TypeError: exceptions must derive from BaseException
I've tried importing celery from the app itself:
...
from project import db
from app import celery

@celery.task()
def post_to_db(payload):
    ...
This also errors with: TypeError: exceptions must derive from BaseException
Any suggestions gratefully received. There's a final piece of the puzzle I'm missing, and it's very frustrating.
With thanks to snakecharmerb
I had to add ContextTask to the make_celery() function in /project/celery_utils.py:
from celery import current_app as current_celery_app

def make_celery(app):
    celery = current_celery_app
    celery.config_from_object(app.config, namespace="CELERY")

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    celery.Task = ContextTask
    return celery
And then a few tweaks in /project/users/tasks.py:
from celery import shared_task

from .models import User
from project import db

@shared_task()
def post_to_db(payload):
    user = User(**payload)
    db.session.add(user)
    db.session.commit()
    db.session.close()
    return True
Now I can see the user in the database, and my message queue is progressing as expected.
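As a quick sanity check (my own addition, not part of the original answer; the payload values are made up), the endpoint can be exercised with Flask's test client while the broker and a worker are running:
# exercise the POST route with Flask's built-in test client
from project import create_app

app = create_app()
client = app.test_client()

# note: with url_prefix="/users" and a route of "/users", the full path is /users/users
resp = client.post("/users/users", json={"username": "alice", "email": "alice@example.com"})
print(resp.get_json())  # e.g. {"id": "<task id>", "status": "PENDING"}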

How to run a celery task on flask startup?

Very simple question, I hope. I have a flask service that needs to listen to a subscription. I have all the code written to listen to the subscription and run some code when triggered.
The flask app code is:
from flask import Flask, jsonify
import logging

from celery.bin.worker import worker
from celery import shared_task

from proj.celery_config import make_celery, get_options
from proj.config import Config

logger = logging.getLogger(__name__)

app = Flask(__name__)
app.config.from_object(Config)

celery = make_celery(app)
options = get_options(app)
worker = worker(app)

@shared_task()
def run_listener():
    listen()

@app.route('/actuator/health')
def health_check():
    return jsonify({'status': 'UP'})

@app.route('/')
def hello_world():
    """
    A simple test route for verifying flask is working.
    :return: string containing "Hello, World!"
    """
    logger.info("Hello world called!")
    return 'Hello, World!'

def main():
    return app
The code for make_celery and get_options is:
from celery import Celery

def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_BROKER_URL'],
        broker=app.config['CELERY_BROKER_URL']
    )
    celery.conf.update(app.config)

    class ContextTask(celery.Task):
        def __call__(self, *args, **kwargs):
            with app.app_context():
                return self.run(*args, **kwargs)

    celery.Task = ContextTask
    return celery

def get_options(app):
    return {
        'broker': app.config['CELERY_BROKER_URL'],
        'traceback': True,
        'loglevel': 'info',
        'queues': 'q1'
    }
I have not set up all the endpoints yet, but here is the important part: the task run_listener needs to run on startup without launching a worker outside the app. Meaning I cannot do a celery -A tasks .... I just want it to run whenever the flask app is run. Is there a way to do this?
I have tried running worker.run(**options), but I get an error no matter whether I initialize the worker object with the flask app or the celery app. Am I missing something?
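One approach worth trying (a hedged sketch, not a confirmed answer: celery.bin.worker is a CLI wrapper and is awkward to drive programmatically): use Celery's programmatic entry point app.worker_main(argv) in a child process, so one command starts both the worker and the Flask app. Using a separate process rather than a thread matters, because the worker installs signal handlers, which only works in a main thread. In Celery 5, argv must include the worker subcommand:
# sketch: run a Celery worker alongside the Flask dev server from one entry point
import multiprocessing

def start_worker():
    # --pool=solo keeps the worker single-threaded and simple
    celery.worker_main(argv=['worker', '--loglevel=info', '--pool=solo'])

if __name__ == '__main__':
    multiprocessing.Process(target=start_worker, daemon=True).start()
    run_listener.delay()  # enqueue the listener task once at startup
    app.run()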

Celery using default broker instead of Redis. Flask + Celery + factory pattern

The closest working answer is this one:
How to use Flask-SQLAlchemy in a Celery task
I aim this question at someone who is actually using Python, Flask, the factory pattern and Celery. Python is 2.7; the others are the latest versions as of today.
I am trying to avoid circular dependencies and do it the Flask way. I have gone through ten pages of Google results and every solution I could find, and I could not solve this.
~/git/project celery -A app worker --loglevel=info
Celery is still connecting to:
[2017-11-10 16:08:12,208: ERROR/MainProcess] consumer: Cannot connect to amqp://guest:**@127.0.0.1:5672//: [Errno 111] Connection refused.
Trying again in 32.00 seconds...
Despite various attempts to start the app
app/extensions.py
from flask.ext.marshmallow import Marshmallow
from flask.ext.sqlalchemy import SQLAlchemy
from flask_mail import Mail
import flask
from celery import Celery

class FlaskCelery(Celery):
    def __init__(self, *args, **kwargs):
        super(FlaskCelery, self).__init__(*args, **kwargs)
        self.patch_task()
        if 'app' in kwargs:
            self.init_app(kwargs['app'])

    def patch_task(self):
        TaskBase = self.Task
        _celery = self

        class ContextTask(TaskBase):
            abstract = True

            def __call__(self, *args, **kwargs):
                if flask.has_app_context():
                    return TaskBase.__call__(self, *args, **kwargs)
                else:
                    with _celery.app.app_context():
                        return TaskBase.__call__(self, *args, **kwargs)

        self.Task = ContextTask

    def init_app(self, app):
        self.app = app
        self.config_from_object(app.config)
        print self._conf['broker_url']

celery = FlaskCelery()
db = SQLAlchemy()
ma = Marshmallow()
mail = Mail()
(When init_app does run, the print self._conf['broker_url'] line outputs: redis://localhost:6379/0.)
app/__init__.py
from flask import Flask, render_template

from app.extensions import db, ma, mail, celery
from celerytasks import save_mailbox_items, sumf
from config import config
from utils import encoding_utils

def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config[config_name])

    # SQLAlchemy configuration
    app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://...'

    # Celery configuration
    app.config['BROKER_URL'] = 'redis://localhost:6379/0'
    app.config['broker_url'] = 'redis://localhost:6379/0'
    app.config['celery_broker_url'] = 'redis://localhost:6379/0'
    app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
    app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'

    register_extensions(app)
    return app

def register_extensions(app):
    db.init_app(app)
    with app.app_context():
        db.create_all()
    ma.init_app(app)
    mail.init_app(app)
    celery.init_app(app)

    from .api_v1 import api as api_v1_blueprint
    app.register_blueprint(api_v1_blueprint, url_prefix='/api/v1')

    @app.route('/', methods=['GET'])
    def index():
        return render_template('index.html')
./manager.py
import os

from flask.ext.script import Manager

from app import create_app

app = create_app(os.getenv('APP_CONFIG', 'default'))
manager = Manager(app)

@manager.shell
def make_shell_context():
    return dict(app=app)

if __name__ == '__main__':
    manager.run()
When you run your celery worker, it will use the instance created with
celery = FlaskCelery()
But because it does not receive a Flask app as an argument, you never go through self.init_app(kwargs['app']), and thus it uses the default configuration.
Several options are possible to fix this:
- instantiate the FlaskCelery object and pass a Flask instance when doing so, or
- in your FlaskCelery class, instantiate a Flask app in __init__ if none is passed to the constructor.
For the latter option, this would give something like:
import os

import flask
from celery import Celery

class FlaskCelery(Celery):
    def __init__(self, *args, **kwargs):
        super(FlaskCelery, self).__init__(*args, **kwargs)
        self.patch_task()
        if 'app' in kwargs:
            self.init_app(kwargs['app'])
        else:
            # imported lazily to avoid a circular import with app/__init__.py
            from app import create_app
            self.init_app(create_app(os.getenv('APP_CONFIG', 'default')))

    def patch_task(self):
        TaskBase = self.Task
        _celery = self

        class ContextTask(TaskBase):
            abstract = True

            def __call__(self, *args, **kwargs):
                if flask.has_app_context():
                    return TaskBase.__call__(self, *args, **kwargs)
                else:
                    with _celery.app.app_context():
                        return TaskBase.__call__(self, *args, **kwargs)

        self.Task = ContextTask

    def init_app(self, app):
        self.app = app
        self.config_from_object(app.config)
        print self._conf['broker_url']
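For the first option (a sketch of my own; the file name celery_worker.py is an assumption), the idea is to point the worker at an instance that has already seen the app, e.g. via a small worker entry-point module:
# celery_worker.py -- start the worker with:
#   celery -A celery_worker.celery worker --loglevel=info
import os

from app import create_app
from app.extensions import celery

app = create_app(os.getenv('APP_CONFIG', 'default'))
celery.init_app(app)  # now the worker picks up broker_url from the Flask config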

Correctly managing postgresql connections in celery task for Flask-SQLAlchemy and Celery

I'm using Flask-SQLAlchemy, Celery and uWSGI.
I know that Flask-SQLAlchemy automatically manages the session for you. I'm not sure how this works with Celery workers, but when I run a task a second time I get the following error: DatabaseError: (psycopg2.DatabaseError) server closed the connection unexpectedly.
Here's how I create the app context and celery tasks:
def make_celery(app):
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_BACKEND'],
        broker=app.config['CELERY_BROKER_URL'],
    )
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
It seems that maybe the workers are sharing the same database connection, and after a task completes that connection is not replenished?
It may be related to the following question?
I'm not sure how to correctly set up the workers or celery so that they're using fresh connections to the database.
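One common remedy for stale worker connections (my own suggestion, separate from the answer below) is to drop the scoped session after every task via Celery's task_postrun signal, so each task returns its connection to the pool instead of holding it across tasks:
# a minimal sketch: release the session (and its connection) after each task
from celery.signals import task_postrun

from myapp import db  # hypothetical Flask-SQLAlchemy instance

@task_postrun.connect
def close_session(*args, **kwargs):
    # remove() closes the scoped session and checks its connection back in
    db.session.remove()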
Okay, I figured it out: every process that uses an application context must create its own application context. Before, in my app/__init__.py, I was simply creating the application globally, like so:
from flask import Flask
app = Flask(__name__)
I then changed my app to use create_app, as in this pattern.
Now, my tasks.py looks like this:
from myapp import create_app
from celery import Celery

def make_celery(app=None):
    app = app or create_app()
    celery = Celery(
        app.import_name,
        backend=app.config['CELERY_BACKEND'],
        broker=app.config['CELERY_BROKER_URL'],
    )
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery

celery = make_celery()
Make sure in your create_app you are calling db.init_app(app).
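For completeness (an illustrative sketch of the factory the answer refers to; the config values are assumptions), create_app just needs to bind the extension to each new app instance:
# myapp/__init__.py -- illustrative application factory
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://localhost/mydb'  # assumed
    app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'           # assumed
    app.config['CELERY_BACKEND'] = 'redis://localhost:6379/0'              # assumed
    db.init_app(app)  # the call the answer says must be present
    return app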

Flask app context and celery integration

When integrating Celery with a Flask app, does Celery need to be aware of the Flask application context?
Can I just do something like:
from celery import Celery

celery = Celery()

@celery.task
def mytask():
    ...
Or do I have to do this:
def make_celery(app=None):
    app = app or create_app(os.getenv('FLASK_CONFIG') or 'default')
    celery = Celery(__name__, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)

    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery
And then run celery = make_celery(app)?
Celery tasks only need to be aware of the application context if you're doing things that require it (database queries, etc.). Otherwise you can just use Celery as is.
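To make the distinction concrete (my own illustration, not from the answer): a pure-computation task works with a bare Celery instance, while anything touching app-bound extensions needs the context:
# fine without any Flask context:
@celery.task
def add(x, y):
    return x + y

# needs an app context, because db is bound to a Flask app:
@celery.task
def count_users():
    from sqlalchemy import text
    from myapp import db  # hypothetical Flask-SQLAlchemy instance
    return db.session.execute(text("SELECT count(*) FROM users")).scalar()
With the ContextTask pattern above, the second task gets its context automatically; without it, you would wrap the body in with app.app_context(): yourself.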
