I am trying to use celery in an app to run long tasks asynchronously.
In the top project folder I have application.py:
from flask_stormpath import StormpathManager
from app import create_app
from celery import Celery
app = create_app('DevelopmentConfig')
stormpath_manager = StormpathManager(app)
celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
celery.conf.update(app.config)
if __name__ == '__main__':
    app.run()
The config.py looks like this:
class Config:
    SECRET_KEY = 'something_very_secret'
    broker_url = 'sqs://'
    broker_transport_options = {'region': 'eu-west-1',
                                'visibility_timeout': 3600,
                                'polling_interval': 0.3,
                                'queue_name_prefix': 'celery-'}
    csrf = SECRET_KEY
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

    @staticmethod
    def init_app(app):
        pass
class DevelopmentConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
class ProductionConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'sqs://'

config = {'development': DevelopmentConfig,
          'default': DevelopmentConfig}
and in my views.py I try to run a task:
from flask import render_template, flash, request, jsonify, Response
from wtforms import Form, validators, SelectMultipleField, widgets
from flask_stormpath import login_required
from . import main
import numpy as np
class MultiCheckboxField(SelectMultipleField):
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.CheckboxInput()

@celery.task(bind=True)
def do_something(self, test, training_size, testing_size):
Now, when I run it like this, I get the message that at @celery.task the name celery is not defined. Fair point, so I changed it to @main.celery.task. When I do this, I get the error "AttributeError: 'Blueprint' object has no attribute 'celery'".
Then I tried to initialize Celery in the __init__.py file:
from flask import Flask
from celery import Celery
def create_app(config_name):
    app = Flask(__name__)
    configuration = "config." + config_name
    app.config.from_object(configuration)

    celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
    celery.conf.update(app.config)

    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    return app
When I do this, I get the error: ImportError: No module named CELERY_BROKER_URL
So I don't know where to import and initialize Celery, or how to set up the blueprint so that I can use @celery.task in views.py. Any help would be highly appreciated.
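For reference, a rough sketch of one way to wire this up: a module-level Celery instance created in app/__init__.py and imported from views.py. Names follow the code above, and this is not verified against this exact project:

# app/__init__.py (sketch)
from flask import Flask
from celery import Celery

from config import Config  # base class defined above; it holds CELERY_BROKER_URL

celery = Celery(__name__, broker=Config.CELERY_BROKER_URL)

def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object('config.' + config_name)  # e.g. 'config.DevelopmentConfig'
    celery.conf.update(app.config)

    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    return app

# app/main/views.py (sketch)
from .. import celery

@celery.task(bind=True)
def do_something(self, test, training_size, testing_size):
    ...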
Related
I have set all configuration inside a Config class in the settings.py module:
import os, secrets
basedir = os.path.abspath(os.path.dirname(__file__))
class Config():
    SECRET_KEY = secrets.token_hex(16)
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'sqlitedb/data.sqlite')
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    MAIL_SERVER = 'smtp.gmail.com'
    MAIL_PORT = 587
    MAIL_USE_TLS = True
    ...........
which is loaded by the main app.py:
from flask import Flask
from MyProject.extensions import db, mail, bootstrap, migrate
from MyProject.settings import Config
from werkzeug.security import generate_password_hash, check_password_hash

def create_app():
    app = Flask(__name__.split('.')[0])
    app.url_map.strict_slashes = False
    app.config.from_object(Config)
    register_extensions(app)
    register_blueprints(app)
    return app
My task is to hand over the SECRET_KEY value as an argument to a function that lives in another module (mail.py):
from itsdangerous import URLSafeTimedSerializer
from Myproject.app import mail
from Myproject import app
def send_congrats_email(user):
    confirm_serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
    confirm_url = url_for(
        'confirm_email',
        token=confirm_serializer.dumps(user, salt='email-confirmation-salt'),
        _external=True)
    send_email('[Congrats] You are registered',
               sender="NICOLAS",
               recipients=[user.email],
               html_body=render_template('users/email_confirmation.html',
                                         user=user))
Please advise: what is the right/proper way of doing this?
The solution to this problem is the following:
from flask import current_app
and then modify:
confirm_serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
to
confirm_serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
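Putting it together, the token-building part of the helper could look roughly like this. A sketch only: generate_confirmation_url is a made-up helper name, and serializing user.email is an assumption; the endpoint name and salt come from the snippet above:

from flask import current_app, url_for
from itsdangerous import URLSafeTimedSerializer

def generate_confirmation_url(user):
    # current_app proxies the running application, so app is not imported directly
    confirm_serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
    # serialize something JSON-friendly (assumption: the user's email address)
    token = confirm_serializer.dumps(user.email, salt='email-confirmation-salt')
    return url_for('confirm_email', token=token, _external=True)

This only works while an application (or request) context is active, e.g. inside a view or a with app.app_context(): block.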
I spent a lot of hours trying to understand why some Celery configuration variables (imports and beat_schedule) from my config file are not loaded.
I would like to centralize all my config variables in config.py, but BEAT_SCHEDULE, IMPORTS and some other config variables are not taken into account.
config.py
import os
from dotenv import load_dotenv
load_dotenv()
# Find the absolute file path to the top level project directory
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
    """
    Base configuration class. Contains default configuration settings plus
    configuration settings applicable to all environments.
    """
    # Default settings
    FLASK_ENV = 'development'
    DEBUG = False
    TESTING = False

    CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
    CELERY_ACCEPT_CONTENT = ['json', 'raw']
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_IMPORTS = ['app.tasks']
    CELERY_BEAT_SCHEDULE = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
The make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask
def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
I create the Celery app and initialize it in __init__.py:
from flask_celeryext import FlaskCeleryExt
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)
def create_app():
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)

    # Configure the flask app instance
    CONFIG_TYPE = os.getenv('CONFIG_TYPE', default='config.Config')
    app.config.from_object(CONFIG_TYPE)

    ext_celery.init_app(app)
    print(ext_celery.celery.conf.humanize(with_defaults=False, censored=True))
If I check the console, I can't see any CELERY_BEAT_SCHEDULE option set.
I tried different config variable names (CELERYBEAT_SCHEDULE, BEAT_SCHEDULE, beat_schedule), but nothing works.
But if I modify my make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask
def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.conf.imports = ['app.tasks']
    celery.conf.beat_schedule = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
It works as expected.
Why are BEAT_SCHEDULE and IMPORTS not loaded from my config.py?
Thanks
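A possible middle ground, sketched from the workaround above: keep the two settings in config.py and copy them over explicitly inside make_celery. CELERY_IMPORTS and CELERY_BEAT_SCHEDULE are the keys already defined in the Config class; this is an untested sketch:

import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    celery.config_from_object(app.config)
    # these two settings were not picked up from app.config, so apply them by hand
    celery.conf.imports = app.config.get('CELERY_IMPORTS', [])
    celery.conf.beat_schedule = app.config.get('CELERY_BEAT_SCHEDULE', {})
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery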
I am getting the following error when trying to set up a scheduled job for my flask app:
This typically means that you attempted to use functionality that needed
to interface with the current application object in some way. To solve
this, set up an application context with app.app_context(). See the
documentation for more information.
I have tried wrapping the function print_session (which is just a dummy function to check that the session data will pull through; in reality this function will query a database) in a with current_app.app_context(): block, as I have seen in a few other apps, but no joy. Does anyone know why it is still outside of the application context?
main.py
from website import create_app
app = create_app()
if __name__=="__main__":
app.run(debug=True,host='localhost',port=5000,threaded=True)
__init__.py
from flask import Flask, session
from flask_sqlalchemy import SQLAlchemy
from os import path
from flask_session import Session
from flask_login import LoginManager
import redis
db = SQLAlchemy()
DB_NAME = 'sqlite:///db.sqlite3'
sess=Session()
login_manager = LoginManager()
def create_app():
    app = Flask(__name__)
    app.config['SECRET_KEY'] = "SECRET_KEY"
    app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
    app.config['SESSION_TYPE'] = 'SESSION_TYPE'
    app.config['SESSION_REDIS'] = 'SESSION_REDIS'

    db.init_app(app)
    sess.init_app(app)

    login_manager.login_view = 'auth.login'
    login_manager.init_app(app)

    # with app.app_context():
    from .views import views
    from .auth import auth

    app.register_blueprint(views, url_prefix='/')
    app.register_blueprint(auth, url_prefix='/')

    from .models import User, Token

    create_database(app)

    return app

def create_database(app):
    db.create_all(app=app)
    print('Created database')
views.py
from flask import Blueprint,render_template,session,redirect,request,url_for
from flask import current_app
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
import datetime
from flask_login import login_required,current_user
from requests_oauthlib import OAuth2Session
from . import db
from .models import Token
from functools import wraps
views = Blueprint('views', __name__)

def print_session(value):
    with current_app.app_context():
        print('Yes', value)
        return redirect(url_for('views.home'))

@views.route('/start_schedule')
@login_required
def start_xero_schedule():
    with app.app_context():
        sched = BackgroundScheduler()
        sched.add_job(print_session, 'interval', args=[session['value']], seconds=10)
        sched.start()
    return redirect(url_for('views.xero'))
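For reference, a sketch of one way to give the scheduled job its own application context: unwrap the current_app proxy while the request is being handled, pass the real app object to the job, and push the context inside the job. Route and function names follow the code above; untested:

def print_session(app, value):
    # push an application context inside the background job itself
    with app.app_context():
        print('Yes', value)

@views.route('/start_schedule')
@login_required
def start_xero_schedule():
    app = current_app._get_current_object()  # the real app behind the proxy
    sched = BackgroundScheduler()
    sched.add_job(print_session, 'interval', args=[app, session['value']], seconds=10)
    sched.start()
    return redirect(url_for('views.xero'))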
I tried to add Celery to my existing Flask project. After adding it, I got a "working outside of application context" error while running. It seems that the Celery worker lacks my application context, but I am not sure where to pass the application context to the Celery worker in this case.
Here is my current structure (I tried to follow a factory pattern with blueprints and API documentation):
-run.py
-app
    -module1
        -controller.py
        -model.py
        -service.py
    -__init__.py
    -config.py
For the __init__.py
# __init__.py
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
from app.config import Config
from flask_restplus import Api
from celery import Celery
cors = CORS()
db = SQLAlchemy()
api = Api()
celery = Celery(__name__, broker=Config.CELERY_BROKER_URL, include=["app.module1.service"])
def create_app(config_class=Config):
    app = Flask(__name__, static_url_path='')
    app.config.from_object(Config)

    cors.init_app(app)
    db.init_app(app)
    api.init_app(app=app)
    celery.conf.update(app.config)

    from app.module1.controller import blueprint
    from app.module1.controller import ns

    app.register_blueprint(blueprint)
    api.add_namespace(ns)

    return app
For the run.py
from app import create_app
app = create_app()
if __name__ == '__main__':
    app.run(threaded=True, debug=True)
For the service.py
from app import db, celery

@celery.task(bind=True)
def service1(self):
    # do something & return
For the controller.py
from flask import Blueprint
from flask_restplus import Api, Resource

from app.module1.service import service1

blueprint = Blueprint('service', __name__)
apis = Api(app=blueprint)
ns = apis.namespace('service', 'service description')

@ns.route("/")
class SomeList(Resource):
    def get(self):
        service1.apply_async()
        # return
I think the confusion is based on the fact that you are trying to "pass" an application context to the Celery worker. In reality the Flask process cannot pass a context to the worker because they are different processes. The Celery worker process needs to create its own Flask application instance by calling create_app() so that it can push its own app contexts when needed.
So for example, in your service1 task:
from app import db, celery, create_app

@celery.task(bind=True)
def service1(self):
    app = create_app()
    with app.app_context():
        # do something & return
To make this a bit more efficient, you can create a single global app that is shared by all your tasks:
from app import db, celery, create_app

app = create_app()

@celery.task(bind=True)
def service1(self):
    with app.app_context():
        # do something & return
I'm making an app in Flask and I've incorporated Celery into it. However, I have to start the Celery worker from the terminal (celery -A app.celery worker) if I want it to run alongside the app. I tried running it from the main run.py file as follows.
__init__.py
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.mail import Mail
from celery import Celery
from kombu import serialization
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
mail = Mail(app)
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
serialization.registry._decoders.pop("application/x-python-serialize")
from app import views
and run.py
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker
app = current_app._get_current_object()
worker = worker.worker(app=app)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
But this gives the error AttributeError: 'Celery' object has no attribute 'config'
Any pointers as to what I'm doing wrong would be much appreciated.
Your run.py should be:
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker
application = current_app._get_current_object()
worker = worker.worker(app=application)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
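Depending on the Celery version, a shorter programmatic alternative is to call worker_main on the Celery app itself. This is only a sketch: it assumes the celery instance created in app/__init__.py is importable as app.celery, and the argv form follows Celery 5's worker_main:

#!flask/bin/python
from app import celery

if __name__ == '__main__':
    # roughly equivalent to: celery -A app.celery worker --loglevel=INFO
    celery.worker_main(['worker', '--loglevel=INFO'])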