Run Celery worker from Flask app - Python

I'm making an app in Flask and I've incorporated Celery into it. However, I have to run the worker via the terminal (celery -A app.celery worker) if I want it to run alongside the app. I tried running the worker from the main run.py file as follows.
__init__.py
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.mail import Mail
from celery import Celery
from kombu import serialization
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
mail = Mail(app)
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
serialization.registry._decoders.pop("application/x-python-serialize")
from app import views
and run.py
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker
app = current_app._get_current_object()
worker = worker.worker(app=app)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
But this gives the error AttributeError: 'Celery' object has no attribute 'config'.
Any pointers as to what I'm doing wrong would be much appreciated.

Your run.py rebinds the name app: from app import app gives you the Flask application, but app = current_app._get_current_object() then replaces it with the Celery app, which has no config attribute, hence the AttributeError. Keep the two objects under separate names. Your run.py should be:
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker
application = current_app._get_current_object()
worker = worker.worker(app=application)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
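Note (not part of the original answer): celery.bin.worker is Celery's internal CLI machinery and its API changed in Celery 5.x, so the snippet above is version-sensitive. A more stable way to start a worker programmatically is the Celery app's worker_main() method; a minimal sketch, reusing the celery instance from __init__.py above:

#!flask/bin/python
from app import celery

# Roughly equivalent to running: celery -A app.celery worker --loglevel=INFO
celery.worker_main(['worker', '--loglevel=INFO'])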

Related

Celery not loading some configuration variables

I spent a lot of hours trying to understand why some Celery configuration variables (imports and beat_schedule) from my config file are not loaded.
I would like to centralize all my config variables in config.py, but BEAT_SCHEDULE, IMPORTS and some other config variables are not taken into account.
config.py
import os
from dotenv import load_dotenv

load_dotenv()

# Find the absolute file path to the top level project directory
basedir = os.path.abspath(os.path.dirname(__file__))

class Config:
    """
    Base configuration class. Contains default configuration settings + configuration settings applicable to all environments.
    """
    # Default settings
    FLASK_ENV = 'development'
    DEBUG = False
    TESTING = False
    CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
    CELERY_ACCEPT_CONTENT = ['json', 'raw']
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_IMPORTS = ['app.tasks']
    CELERY_BEAT_SCHEDULE = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
The make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
I create the Celery app and initialize it in __init__.py
from flask_celeryext import FlaskCeleryExt

ext_celery = FlaskCeleryExt(create_celery_app=make_celery)

def create_app():
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)
    # Configure the flask app instance
    CONFIG_TYPE = os.getenv('CONFIG_TYPE', default='config.Config')
    app.config.from_object(CONFIG_TYPE)

    ext_celery.init_app(app)
    print(ext_celery.celery.conf.humanize(with_defaults=False, censored=True))
If I check the console, I can't see any CELERY_BEAT_SCHEDULE option set.
I tried different config variable names (CELERYBEAT_SCHEDULE, BEAT_SCHEDULE, beat_schedule); nothing works.
But if I modify my make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.conf.imports = ['app.tasks']
    celery.conf.beat_schedule = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
It works as expected.
Why are BEAT_SCHEDULE and IMPORTS not loaded from my config.py?
Thanks
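No answer is attached here, but a likely cause (an editor's note, not from the thread): Celery only honours setting names it recognises, and mixing old-style uppercase keys with new-style lowercase ones is fragile; in particular, the old-style name for beat_schedule is CELERYBEAT_SCHEDULE, not CELERY_BEAT_SCHEDULE. The documented Flask pattern is to pass namespace='CELERY' to config_from_object(), which strips the prefix from every CELERY_-prefixed key and applies it as the matching lowercase setting. A minimal sketch of make_celery under that assumption:

import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    # With namespace='CELERY', keys like CELERY_BEAT_SCHEDULE and
    # CELERY_IMPORTS are stripped of the prefix, lowercased, and applied
    # as beat_schedule, imports, broker_url, and so on.
    celery.config_from_object(app.config, namespace='CELERY')
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery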

Pass application context to Celery in the Flask framework

I tried to add Celery to my existing Flask project. After adding it, I got a "working outside of application context" error while running. It seems that the Celery worker lacks my application context, but I am not sure where to pass the application context to the Celery worker in this case.
Here is my current structure (I tried to follow a factory pattern with blueprints and API documentation):
-run.py
-app
    -module1
        -controller.py
        -model.py
        -service.py
    -__init__.py
    -config.py
For the __init__.py
# __init__.py
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
from app.config import Config
from flask_restplus import Api
from celery import Celery

cors = CORS()
db = SQLAlchemy()
api = Api()
celery = Celery(__name__, broker=Config.CELERY_BROKER_URL, include=["app.module1.service"])

def create_app(config_class=Config):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(Config)

    cors.init_app(app)
    db.init_app(app)
    api.init_app(app=app)
    celery.conf.update(app.config)

    from app.module1.controller import blueprint
    from app.module1.controller import ns
    app.register_blueprint(blueprint)
    api.add_namespace(ns)

    return app
For the run.py
from app import create_app

app = create_app()

if __name__ == '__main__':
    app.run(threaded=True, debug=True)
For the service.py
from app import db, celery

@celery.task(bind=True)
def service1(self):
    # do something & return
For the controller.py
from flask import Blueprint
from flask_restplus import Api, Resource

from app.module1.service import service1

blueprint = Blueprint('service', __name__)
apis = Api(app=blueprint)
ns = apis.namespace('service', 'service description')

@ns.route("/")
class SomeList(Resource):
    def get(self):
        service1.apply_async()
        # return
I think the confusion is based on the fact that you are trying to "pass" an application context to the Celery worker. In reality the Flask process cannot pass a context to the worker because they are different processes. The Celery worker process needs to create its own Flask application instance by calling create_app() so that it can push its own app contexts when needed.
So for example, in your service1 task:
from app import db, celery, create_app

@celery.task(bind=True)
def service1(self):
    app = create_app()
    with app.app_context():
        # do something & return
To make this a bit more efficient, you can create a single global app that is shared by all your tasks:
from app import db, celery, create_app

app = create_app()

@celery.task(bind=True)
def service1(self):
    with app.app_context():
        # do something & return

Celery [ERROR/MainProcess] Process 'Worker' exited with 'exitcode 1'

I am trying to use celery==3.1.25 (Python 2.7) to run some expensive tasks separately from my main Flask app. However, when I start the Celery worker using celery -A run.celery worker --loglevel=info, the process fails with:
[ERROR/MainProcess] Process 'Worker' exited with 'exitcode 1'
Here is my file structure:
app/
__init__.py
celery_functions.py
routes.py
...
run.py
run.py:
from app import create_app
from app.config import Config
from celery import Celery

app = create_app('default')
app.app_context().push()

from app.routes import *

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)
__init__.py:
from flask import Flask
from flask_bootstrap import Bootstrap
from config import Config

def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    bootstrap = Bootstrap(app)
    return app
celery_functions.py:
import celery

@celery.task(name='celery_functions.archive_repo')
def archive_repo():
    # do something
routes.py:
from celery_functions import *

@app.route('/archive', methods=['GET', 'POST'])
@login_required
def archive():
    archive_repo.delay()
    return ''
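No answer survives here, but one inconsistency stands out (an editor's note, hedged accordingly): celery -A run.celery worker expects run.py to expose a Celery instance named celery, yet run.py above only imports the Celery class and never instantiates it, and @celery.task in celery_functions.py decorates through the bare celery module rather than an application instance. A sketch of run.py with the missing instance; the Redis broker URL is an assumption, substitute your own:

from app import create_app
from celery import Celery

app = create_app('default')
app.app_context().push()

# The instance that "celery -A run.celery worker" loads.
# The broker URL below is a placeholder assumption.
celery = Celery(app.import_name, broker='redis://localhost:6379/0')
celery.conf.update(app.config)

from app.routes import *

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000)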

Flask and Celery large application structure

I am trying to use celery in an app to run long tasks asynchronously.
In the top project folder I have application.py:
from flask_stormpath import StormpathManager
from app import create_app
from celery import Celery

app = create_app('DevelopmentConfig')
stormpath_manager = StormpathManager(app)
celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
celery.conf.update(app.config)

if __name__ == '__main__':
    app.run()
The config.py looks like this:
class Config:
    SECRET_KEY = 'something_very_secret'
    broker_url = 'sqs://'
    broker_transport_options = {'region': 'eu-west-1',
                                'visibility_timeout': 3600,
                                'polling_interval': 0.3,
                                'queue_name_prefix': 'celery-'}
    csrf = SECRET_KEY
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

    @staticmethod
    def init_app(app):
        pass

class DevelopmentConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

class ProductionConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'sqs://'

config = {'development': DevelopmentConfig,
          'default': DevelopmentConfig}
and in my views.py I try to run a task:
from flask import render_template, flash, request, jsonify, Response
from wtforms import Form, validators, SelectMultipleField, widgets
from flask_stormpath import login_required
from . import main
import numpy as np

class MultiCheckboxField(SelectMultipleField):
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.CheckboxInput()

@celery.task(bind=True)
def do_something(test, training_size, testing_size):
Now, when I run it like this, I get the message that in @celery.task the name celery is not defined. Fair point, so I changed it to @main.celery.task. When I do this, I get the error message "AttributeError: 'Blueprint' object has no attribute 'celery'".
Then I tried to initiate celery in the __init__.py file:
from flask import Flask
from celery import Celery

def create_app(config_name):
    app = Flask(__name__)
    configuration = "config." + config_name
    app.config.from_object(configuration)

    celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
    celery.conf.update(app.config)

    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    return app
When I do this, I get the error: ImportError: No module named CELERY_BROKER_URL
So, I don't know where to import and initiate celery, and how to create a blueprint so that I can use celery.task in views.py. Any help would be highly appreciated.
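No answer is attached here, but two things stand out (an editor's note): app.config.from_object('CELERY_BROKER_URL') asks Flask to import a module named CELERY_BROKER_URL, which explains the ImportError; the lookup you want is app.config['CELERY_BROKER_URL']. And for @celery.task to be importable from views.py, the Celery instance has to live at module level rather than as a local inside create_app. A minimal sketch along those lines, reusing this question's application.py:

# application.py (sketch): create celery once, at import time
from app import create_app
from celery import Celery

app = create_app('DevelopmentConfig')

# Read the broker URL from the already-loaded Flask config
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)

# views.py can then do: from application import celery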

Save the Celery task in DB - Django

I'm referring to the Django Celery documentation.
I created celery.py in my proj/proj just as the documentation says, and then added the import to __init__.py.
celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')

app = Celery('proj')
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
)
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
__init__.py
from __future__ import absolute_import
from .celery import app as celery_app
I installed django-celery (pip install django-celery), then migrated (python manage.py migrate djcelery).
It made some of the tables in my DB.
tasks.py
from __future__ import absolute_import
from celery import shared_task
import requests
import json

@shared_task
def post_notification(data, url):
    headers = {'content-type': 'application/json'}
    requests.post(url, data=json.dumps(data), headers=headers)
After that I called my task in my views as
task = post_notification.delay(data, url)
print task.id      # it prints an id
print task.status  # prints PENDING
But nothing gets logged into any of my tables.
I've read many threads on SO (Thread1, Thread2) and many more linked from those threads, but nothing helps.
It provides me the ID and status of the task, but how do I save the task in the DB? Usually it should get logged into celery_taskmeta, but there's nothing in there.
The task does get executed, but I want to save it in the DB as well. How can I do it? Is there something I'm missing?
Try this in celery.py:
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app_name.dev_settings')

app = Celery('app_name')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

app.conf.CELERY_TIMEZONE = 'UTC'
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
)
Then add the following to your settings.py file:
BROKER_URL = 'amqp://guest:guest@localhost//'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
And start the worker.
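For example (the project name app_name follows the snippet above; adjust to yours):

celery -A app_name worker --loglevel=info

With the djcelery database backend configured as above, finished task results should then show up in the celery_taskmeta table.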
