Importing Celery in Flask Blueprints

I have a Flask Application with an MVC structure:
my_app
├── server.py
├── requirements.txt
├── models
│   ├── __init__.py
│   └── model.py
├── controllers
│   ├── __init__.py
│   └── client_controllers
│       ├── controller.py
│       └── another_controller.py
└── templates
I use blueprints to split the server code into "controllers", so I have something like this:
server.py:
from flask import Flask
from celery import Celery
from controllers.client_controllers.controller import controller

app = Flask(__name__)
app.secret_key = 'SECRET'
app.register_blueprint(controller)

# Celery Configuration
def make_celery(app):
    celery = Celery(app.import_name, backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery

app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379',
    CELERY_RESULT_BACKEND='redis://localhost:6379'
)

celery = make_celery(app)

if __name__ == "__main__":
    app.run(host='0.0.0.0', debug=True)
controller.py:
from flask import Blueprint, render_template, json, request, redirect, url_for, abort, session

controller = Blueprint('controller', __name__,
                       template_folder='templates/')

@celery.task()
def add_together(a, b):
    return a + b

@controller.route('/add', methods=['GET'])
def add():
    result = add_together.delay(23, 42)
    result.wait()
    return 'Processing'
As you may notice, celery is not imported into the controller, because I don't know how to import the celery instance from server.py into my controller.py without getting an error. I've been trying with:
from ...server import celery
from ..server import celery
...etc.
but it still fails with errors.

The Flask error RuntimeError: Working outside of application context happens because you are not inside a Flask application_context(). You should use Celery's shared_task, which is what you need given your MVC structure.
celery_flask/
├── celery_tasks
│   ├── app_tasks.py
│   └── __init__.py
├── celery_worker.py
├── controllers
│   ├── __init__.py
│   └── some_controller.py
├── __init__.py
└── server.py
Script app_tasks.py
#=====================
# app_tasks.py
#=====================
from __future__ import absolute_import, unicode_literals
from celery import shared_task

@shared_task(name='celery_tasks.add_together')
def add_together(x, y):
    return x + y
The @shared_task decorator returns a proxy that always points to the currently active Celery instance:
>>> from celery import Celery, shared_task
>>> @shared_task
... def add_together(x, y):
...     return x + y
...
>>> app1 = Celery(broker='amqp://')
>>> add_together.app is app1
True
>>> app2 = Celery(broker='redis://')
>>> add_together.app is app2
True
After you define your tasks, you can call them using a reference to a Celery app. That Celery app can be made part of the Flask application_context(). Example:
Script server.py
from __future__ import absolute_import
from flask import Flask
from celery import Celery
from controllers.some_controller import controller

flask_app = Flask(__name__)
flask_app.secret_key = 'SECRET'
flask_app.register_blueprint(controller)

# Celery Configuration
def make_celery(app):
    celery = Celery('flask-celery-app', backend=app.config['CELERY_RESULT_BACKEND'],
                    broker=app.config['CELERY_BROKER_URL'],
                    include=['celery_tasks.app_tasks'])
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery

def list_celery_task():
    from celery.task.control import inspect
    i = inspect()
    from itertools import chain
    t = set(chain.from_iterable(i.registered_tasks().values()))
    print "registered_tasks={}".format(t)

#======================================
# MAIN
#======================================
flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379',
    CELERY_RESULT_BACKEND='redis://localhost:6379'
)
celery = make_celery(flask_app)
flask_app.celery = celery
list_celery_task()

if __name__ == "__main__":
    flask_app.run(host='0.0.0.0', debug=True)
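A hedged aside on list_celery_task: from celery.task.control import inspect is a pre-4.x import path and was removed in later Celery releases. On Celery 4+ the same listing goes through the app instance built above, roughly as follows (note that inspect().registered() returns None when no workers are running):

def list_celery_task(celery_app):
    # Celery 4+: inspection hangs off the app's control interface
    registered = celery_app.control.inspect().registered() or {}
    tasks = set(t for worker_tasks in registered.values() for t in worker_tasks)
    print("registered_tasks={}".format(tasks))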
Script some_controller.py
#============================
# some_controller.py
#============================
from __future__ import absolute_import
from flask import Blueprint
from flask import current_app

controller = Blueprint('controller', __name__,
                       template_folder='templates/')

@controller.route('/add', methods=['GET'])
def add():
    print "calling add"
    result = current_app.celery.send_task('celery_tasks.add_together', args=[12, 6])
    r = result.get()
    print 'Processing is {}'.format(r)
    return 'Processing is {}'.format(r)
Finally, start the worker to consume the tasks:
celery -A celery_worker worker --loglevel=DEBUG
Script celery_worker.py
#============================
# celery_worker.py
#============================
from __future__ import absolute_import
from celery import Celery
# Celery Configuration
def make_celery():
celery = Celery('flask-celery-app', backend='redis://localhost:6379',
broker='redis://localhost:6379',
include=['celery_tasks.app_tasks'])
return celery
celery = make_celery()
print "tasks={}".format( celery.tasks.keys() )

One option is to assign the celery instance to the app instance and then access it through Flask's current_app.
In your server.py, just add:
celery = make_celery(app)
app.celery = celery
Then you can access this in your controller.py:
from flask import current_app

@current_app.celery.task()
def add_together(a, b):
    return a + b
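One caveat with this pattern: current_app is only bound during a request (or an explicit application context), so using current_app.celery.task() as a decorator at module import time can itself raise RuntimeError: Working outside of application context. A hedged sketch of a safer variant, registering the task in server.py and only sending it by name from the blueprint (the task name 'tasks.add_together' is illustrative):

from flask import Blueprint, current_app

controller = Blueprint('controller', __name__)

@controller.route('/add', methods=['GET'])
def add():
    # current_app is available here because we are inside a request
    result = current_app.celery.send_task('tasks.add_together', args=[23, 42])
    return str(result.get(timeout=10))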

Related

Initializing database with flask

I have the following code:
__init__.py:
"""Initialize app."""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app():
"""Construct the core application."""
app = Flask(__name__, instance_relative_config=False)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite'
app.config['RECAPTCHA_PUBLIC_KEY'] = '6LcmEeoUAAAAAIbdhgkFBvz676UCRJMjnSx8H6zy'
app.config['RECAPTCHA_PARAMETERS'] = {'size': '100%'}
db.init_app(app)
# blueprint for auth routes in our app
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint)
# blueprint for non-auth parts of app
from .__main__ import main as main_blueprint
app.register_blueprint(main_blueprint)
with app.app_context():
# Import parts of our application
from . import routes
return app
and I try to initialize the db with the following code:
from realProject import db, create_app
db.create_all(app=create_app())
all of the scripts are in realProject folder
but when I try to run that last snippet I get this error:
ModuleNotFoundError: No module named 'realProject'
What am I doing wrong?
You need to follow this structure:
|__ project_name
    |__ app.py ----------> The main file from where you run your app
    |__ app -----> This is the app folder
        ├── templates
        │   └── index.html
        ├── __init__.py -----------> The __init__.py should be inside the app folder for it to be imported
        └── routes.py
And then do this in your main file:
from app import db, create_app
db.create_all(app=create_app())
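A hedged aside for newer installs: Flask-SQLAlchemy 3.x removed the app argument to create_all(), so on recent versions the equivalent is:

from app import db, create_app

app = create_app()
with app.app_context():
    # create_all() picks up the app bound to the active context
    db.create_all()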

Run Celery task locking Flask

I am trying to organize the project structure for a Flask+Celery app.
When everything lived in a single file, it all worked fine.
But when I distributed the code into modules, calling test_task.apply_async() locks Flask.
My project structure:
web_spider/
    app/
        __init__.py
        rest/
            __init__.py
            views/
                __init__.py
                test_view.py
            flask_app.py
        task_runner/
            __init__.py
            celery_app.py
            tasks.py
    requirements.txt
test_view.py
test_view.py
import flask
from app.task_runner.tasks import test_task

api_test_view = flask.Blueprint('api_test_view', __name__)

@api_test_view.route('/')
def test_view():
    test_task.apply_async()  # locks there
    return 'Hello, World!'
flask_app.py
import flask
from app.rest.views.api_test_view import test_view

flask_app = flask.Flask(__name__)
flask_app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
flask_app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'
flask_app.register_blueprint(test_view)

if __name__ == '__main__':
    flask_app.run(debug=True)
celery_app.py
from app.rest.flask_app import flask_app
import celery
celery_app = celery.Celery(flask_app.name, broker=flask_app.config['CELERY_BROKER_URL'])
celery_app.conf.update(flask_app.config)
tasks.py
from celery import shared_task

@shared_task
def test_task():
    return 1 + 1
Most likely you never load celery_app, so shared_task can't find a Celery app to work with. Add this to your web_spider/app/__init__.py or to web_spider/app/task_runner/__init__.py:
from app.task_runner.celery_app import celery_app
__all__ = ('celery_app',)
It's documented at https://docs.celeryproject.org/en/stable/django/first-steps-with-django.html#django-first-steps (search for shared_task).
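With that re-export in place, the worker can be pointed at the package so the shared tasks get bound to it (a sketch; the module path is assumed from the tree above):
celery -A app worker --loglevel=INFO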

Can't import models in tasks.py with Celery + Django

I want to create a background task to update a record on a specific date. I'm using Django and Celery with RabbitMQ.
I've managed to get the task called when the model is saved with this dummy task function:
tasks.py
from __future__ import absolute_import
from celery import Celery
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)

app = Celery('tasks', broker='amqp://localhost//')

@app.task(name='news.tasks.update_news_status')
def update_news_status(news_id):
    # (I pass the news id and return it, nothing complicated about it)
    return news_id
This task is called from the save() method in my models.py:
from django.db import models
from celery import current_app

class News(models.Model):
    (...)
    def save(self, *args, **kwargs):
        current_app.send_task('news.tasks.update_news_status', args=(self.id,))
        super(News, self).save(*args, **kwargs)
Thing is, I want to import my News model in tasks.py, but if I try it like this:
from .models import News
I get this error:
django.core.exceptions.ImproperlyConfigured: Requested setting
DEFAULT_INDEX_TABLESPACE, but settings are not configured. You must
either define the environment variable DJANGO_SETTINGS_MODULE or call
settings.configure() before accessing settings.
This is what my celery.py looks like:
from __future__ import absolute_import, unicode_literals
from celery import Celery
import os

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myapp.settings')

app = Celery('myapp')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
I have already tried:
- can't import django model into celery task
- making the import inside the task method (Django and Celery, AppRegisteredNotReady exception)
- Celery - importing models in tasks.py
- creating a utils.py and importing it, which was not possible either
and ran into different errors, but in the end I'm not able to import any module in tasks.py.
There might be something wrong with my config but I can't see the error; I followed the steps in the Celery docs: First steps with Django.
Also, my project structure looks like this:
├── myapp
│   ├── __init__.py
│   ├── celery.py
│   ├── settings.py
│   ├── urls.py
│   └── wsgi.py
├── news
│   ├── __init__.py
│   ├── admin.py
│   ├── apps.py
│   ├── tasks.py
│   ├── urls.py
│   ├── models.py
│   └── views.py
├── manage.py
I'm executing the worker from the myapp directory like this:
celery -A news.tasks worker --loglevel=info
What am I missing here? Thanks in advance for your help!
EDIT
After making the changes suggested in the comments:
Add this to celery.py
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
and moving the import inside the task method in tasks.py:
from __future__ import absolute_import
from celery import Celery
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)

app = Celery('tasks', broker='amqp://localhost//')

@app.task(name='news.tasks.update_news_status')
def update_news_status(news_id):
    from .models import News
    return news_id
I get the following error:
[2018-07-20 12:24:29,337: ERROR/ForkPoolWorker-1] Task news.tasks.update_news_status[87f9ec92-c260-4ee9-a3bc-5f684c819f79] raised unexpected: ValueError('Attempted relative import in non-package',)
Traceback (most recent call last):
  File "/Users/carla/Develop/App/backend/myapp-venv/lib/python2.7/site-packages/celery/app/trace.py", line 382, in trace_task
    R = retval = fun(*args, **kwargs)
  File "/Users/carla/Develop/App/backend/myapp-venv/lib/python2.7/site-packages/celery/app/trace.py", line 641, in __protected_call__
    return self.run(*args, **kwargs)
  File "/Users/carla/Develop/App/backend/news/tasks.py", line 12, in update_news_status
    from .models import News
ValueError: Attempted relative import in non-package
Ok so for anyone struggling with this... it turns out my celery.py wasn't reading env variables from the settings.
After a week and lots of research I realised that Celery is not a process of Django but a process running outside of it (duh), so when I tried to load the settings they were loaded, but then I wasn't able to access the env variables I had defined in my .env (I use the dotenv library). Celery was trying to look up the env variables in my .bash_profile (of course).
So in the end my solution was to create a helper module in the same directory where my celery.py is defined, called load_env.py, with the following:
from os.path import dirname, join
import dotenv

def load_env():
    """Get the path to the .env file and load it."""
    project_dir = dirname(dirname(__file__))
    dotenv.read_dotenv(join(project_dir, '.env'))
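(A hedged note: read_dotenv is the django-dotenv API; with the similarly named python-dotenv package the equivalent call would be dotenv.load_dotenv(join(project_dir, '.env')).)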
and then in my celery.py (note the last import and the first instruction):
from __future__ import absolute_import, unicode_literals
from celery import Celery
from django.conf import settings
import os
from .load_env import load_env

load_env()

# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myapp.settings")

app = Celery('myapp')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('myapp.settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
After the call to load_env(), the env variables are loaded and the celery worker has access to them. By doing this I am now able to access other modules from my tasks.py, which was my main problem.
Credit to the folks at Caktus Consulting Group and their django-project-template, because if it wasn't for them I wouldn't have found the answer. Thanks.
Try something like this (it works in Celery 3.1); the import should happen inside the save method, after the call to super():
from django.db import models

class News(models.Model):
    (...)
    def save(self, *args, **kwargs):
        (...)
        super(News, self).save(*args, **kwargs)
        from news.tasks import update_news_status
        update_news_status.apply_async((self.id,))  # apply_async or delay
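One gotcha worth hedging against with this pattern: the worker can pick the task up before the surrounding database transaction commits, so the row may not be visible to it yet. Django's transaction.on_commit defers the send (a sketch, not part of the original answer):

from django.db import models, transaction

class News(models.Model):
    # ...
    def save(self, *args, **kwargs):
        super(News, self).save(*args, **kwargs)
        from news.tasks import update_news_status
        # only enqueue once the enclosing transaction has committed
        transaction.on_commit(lambda: update_news_status.delay(self.id))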
Here is what I would do (Django 1.11 and Celery 4.2): you have a problem in your celery config, and you are trying to re-declare the Celery instance:
tasks.py
from myapp.celery import app  # would contain what you need :)
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)

@app.task(name='news.tasks.update_news_status')
def update_news_status(news_id):
    # (I pass the news id and return it, nothing complicated about it)
    return news_id
celery.py
from __future__ import absolute_import, unicode_literals
from celery import Celery
from django.conf import settings
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myapp.settings")

app = Celery('myapp', backend='rpc://', broker=BROKER_URL)  # your config here
app.config_from_object('django.myapp:settings', namespace='CELERY')  # change here
app.autodiscover_tasks()
models.py
from django.db import models

class News(models.Model):
    (...)
    def save(self, *args, **kwargs):
        super(News, self).save(*args, **kwargs)
        from news.tasks import update_news_status
        update_news_status.delay(self.id)  # change here
And launch it with celery -A myapp worker --loglevel=info, because your app is defined in myapp.celery, so the -A parameter needs to point at the package where the conf is declared.

Can't call Celery in Django

So the problem is that when I try to call the main task from Django's lpr/views.py, the page shows a loading icon and that's it, nothing else happens. There is no output in the Django or Celery console. When I run the task from the Python shell it runs without a problem and saves the result in the db. I added the add task for test purposes, and when I run the add task it returns an error because of the missing 'y' argument, which is normal. But what is up with the main task?
There is my code just in case.
Project structure:
Project
├── acpvs
│   ├── celery.py
│   ├── __init__.py
│   ├── settings.py
│   ├── urls.py
│   └── wsgi.py
├── db.sqlite3
├── lpr
│   ├── __init__.py
│   ├── tasks.py
│   ├── urls.py
│   └── views.py
└── manage.py
settings.py
import djcelery

INSTALLED_APPS = [
    ...
    'djcelery',
    'django_celery_results',
]

CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_ALWAYS_EAGER = False

djcelery.setup_loader()
__init__.py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from acpvs.celery import app as celery_app
__all__ = ['celery_app']
acpvs.celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'acpvs.settings')

app = Celery('acpvs')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
lpr.tasks.py
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from djcelery import celery

@shared_task
def add(x, y):
    return x + y

@shared_task
def main():
    ...
    args = {
        'imageName': imageName,
        'flag': True
    }
    return args
lpr.urls.py
from django.conf.urls import url
from . import views

urlpatterns = [
    url(r'^t/$', views.test_add),
    url(r'^t1/$', views.test_main),
]
lpr.views.py
from . import tasks
from django.http import HttpResponse

def test_add(request):
    result = tasks.add.delay()
    return HttpResponse(result.task_id)

def test_main(request):
    result = tasks.main.delay()
    return HttpResponse(result.task_id)
Update
It still seems to me that there is something wrong with how I have integrated Celery. When I remove .delay() from views.py it works, but of course not asynchronously and not through Celery.
delay() actually executes the task asynchronously. Please confirm whether it is updating the values in the db.
I think it will update the value in the db (if the main method does that), but it will not return the value, since the client only adds a message to the queue; the broker then delivers that message to a worker, which performs the work of main.
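For example, a quick way to see that the call itself returns immediately is to inspect the AsyncResult in the view (a sketch against the views.py above):

from . import tasks
from django.http import HttpResponse

def test_main(request):
    result = tasks.main.delay()
    # .delay() returns an AsyncResult right away; a worker does the actual work.
    # result.state stays PENDING until a worker picks the task up.
    return HttpResponse('task {} state={}'.format(result.task_id, result.state))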
So I got it working by removing all djcelery instances and upgrading Django from 1.11 to 2.0.3.
By the way, I'm using Celery 4.1.0.
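For reference, a sketch of what the settings might look like after that cleanup (djcelery and setup_loader() dropped; django_celery_results keeps the 'django-db' result backend; the rest of the config above is assumed unchanged):

INSTALLED_APPS = [
    # ...
    'django_celery_results',  # replaces djcelery for the django-db result backend
]

CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'django-db'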

Flask and Celery large application structure

I am trying to use celery in an app to run long tasks asynchronously.
In the top project folder I have application.py:
from flask_stormpath import StormpathManager
from app import create_app
from celery import Celery

app = create_app('DevelopmentConfig')
stormpath_manager = StormpathManager(app)
celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
celery.conf.update(app.config)

if __name__ == '__main__':
    app.run()
The config.py looks like this:
class Config:
    SECRET_KEY = 'something_very_secret'
    broker_url = 'sqs://'
    broker_transport_options = {'region': 'eu-west-1',
                                'visibility_timeout': 3600,
                                'polling_interval': 0.3,
                                'queue_name_prefix': 'celery-'}
    csrf = SECRET_KEY
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

    @staticmethod
    def init_app(app):
        pass

class DevelopmentConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

class ProductionConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'sqs://'

config = {'development': DevelopmentConfig,
          'default': DevelopmentConfig}
and in my views.py I try to run a task:
from flask import render_template, flash, request, jsonify, Response
from wtforms import Form, validators, SelectMultipleField, widgets
from flask_stormpath import login_required
from . import main
import numpy as np

class MultiCheckboxField(SelectMultipleField):
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.CheckboxInput()

@celery.task(bin=True)
def do_something(test, training_size, testing_size):
Now, when I run it like this, I get the message that for @celery.task the name celery is not defined. Fair point, so I changed it to @main.celery.task. When I do this, I get the error message "AttributeError: 'Blueprint' object has no attribute 'celery'".
Then I tried to initiate celery in the __init__.py file:
from flask import Flask
from celery import Celery

def create_app(config_name):
    app = Flask(__name__)
    configuration = "config." + config_name
    app.config.from_object(configuration)
    celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
    celery.conf.update(app.config)
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    return app
When I do this, I get the error: ImportError: No module named CELERY_BROKER_URL
So I don't know where to import and initiate celery, and how to create a blueprint so that I can use celery.task in views.py. Any help would be highly appreciated.
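An editorial note on that error, hedged as a reading of the code above: config.from_object expects an importable object (a module path or class), not a single config key, so Flask tries to import a module literally named CELERY_BROKER_URL. The usual wiring reads the key out of the already-loaded config instead, roughly:

from celery import Celery

def make_celery(app):
    # read the broker from the config dict; from_object is only for loading config
    celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
    celery.conf.update(app.config)
    return celery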
