I have spent many hours trying to understand why some Celery configuration variables (imports and beat_schedule) from my config file are not loaded.
I would like to centralize all my config variables in config.py, but BEAT_SCHEDULE, IMPORTS and some other config variables are not taken into account.
config.py
import os
from dotenv import load_dotenv

load_dotenv()

# Find the absolute file path to the top level project directory
basedir = os.path.abspath(os.path.dirname(__file__))

class Config:
    """
    Base configuration class. Contains default configuration settings
    plus configuration settings applicable to all environments.
    """
    # Default settings
    FLASK_ENV = 'development'
    DEBUG = False
    TESTING = False
    CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
    CELERY_ACCEPT_CONTENT = ['json', 'raw']
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_IMPORTS = ['app.tasks']
    CELERY_BEAT_SCHEDULE = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
The make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
I create the Celery app and initialize it in __init__.py:
import os
from flask import Flask
from flask_celeryext import FlaskCeleryExt

# make_celery is the function shown above
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)

def create_app():
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)
    # Configure the Flask app instance
    CONFIG_TYPE = os.getenv('CONFIG_TYPE', default='config.Config')
    app.config.from_object(CONFIG_TYPE)
    ext_celery.init_app(app)
    print(ext_celery.celery.conf.humanize(with_defaults=False, censored=True))
    return app
If I check the console, I can't see any CELERY_BEAT_SCHEDULE option set.
I tried different config variable names (CELERYBEAT_SCHEDULE, BEAT_SCHEDULE, beat_schedule); nothing works.
But if I modify my make_celery function:
import os
from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)
    celery.conf.imports = ['app.tasks']
    celery.conf.beat_schedule = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3)
        }
    }
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
it works as expected.
Why are BEAT_SCHEDULE and IMPORTS not loaded from my config.py?
Thanks
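For context: since Celery 4, config_from_object expects the new lowercase setting names (beat_schedule, imports) unless you pass a namespace that strips a prefix. A minimal sketch of that variant, assuming the CELERY_-prefixed keys from the config above:

from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask

def make_celery(app):
    celery = current_celery_app
    # namespace='CELERY' makes Celery read CELERY_BEAT_SCHEDULE as
    # beat_schedule, CELERY_IMPORTS as imports, and so on.
    celery.config_from_object(app.config, namespace='CELERY')
    celery.flask_app = app
    celery.Task = AppContextTask
    return celery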
Related
I'm having issues getting Celery/Celery Beat to schedule any tasks other than the ones in my celery.py file. I can see the tasks registered with "celery inspect registered", but the tasks do not run on the schedule. I've read all of the docs and I'm hitting a wall. Running Redis on Windows with Ubuntu WSL.
Test runs fine every 10 seconds and shows up in my shell.
PayClosedLines is registered but doesn't come up in my shell when I run my celery worker.
/proj/proj/celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import apps

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')

app = Celery('mysite')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(10.0, test.s('test'), name='add every 10')
    sender.add_periodic_task(30.0, test.s('test'), expires=10)

@app.task
def test(arg):
    print(arg)
proj/proj/settings.py
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'

from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'Payout bets every 10 seconds': {
        'task': 'UFCBetting.tasks.PayClosedLines',
        'schedule': timedelta(seconds=10.0),
    },
}

CELERY_IMPORTS = ('UFCBetting.tasks',)
proj/app/tasks.py
from __future__ import absolute_import, unicode_literals
from .MyBookieTools import get_odds
from .models import BettingLines, Bets, CustomUser
from celery import task, shared_task

@task(name='UpdateLinesTable')
def UpdateLinesTable():
    odds = get_odds()
    for odd in odds:
        bl = BettingLines()
        bl.fighter = odd[0]
        bl.line = odd[1]
        bl.save()

@shared_task
def PayClosedLines():
    unpaid_lines = BettingLines.objects.filter(result="W").exclude(payment_status=True)
    print(unpaid_lines)
    for line in unpaid_lines:
        print(line)
        if line.result == "W":
            unpaid_bets = Bets.objects.filter(line_id=line.id)
            print(unpaid_bets)
            for bet in unpaid_bets:
                user = CustomUser.objects.get(id=bet.placed_by_id)
                user.balance = user.balance + line.payout(amount=bet.bet_amount)
                user.save()
            line.payment_status = 1
            line.save()
    print("Closed lines paid.")
Your settings do not have the CELERY_BEAT_SCHEDULER constant.
If you add this to settings, you don't need to use the --scheduler option:
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
I had django-celery-beat installed, so I needed to start my beat worker like this:
celery -A mysite beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
instead of the usual
celery -A mysite beat
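With the DatabaseScheduler, beat reads its schedule from the database rather than from CELERY_BEAT_SCHEDULE, so entries can be created through the django_celery_beat models. A minimal sketch, reusing the task path from the settings above:

from django_celery_beat.models import IntervalSchedule, PeriodicTask

# Create (or reuse) a 10-second interval, then attach the task to it.
schedule, _ = IntervalSchedule.objects.get_or_create(
    every=10,
    period=IntervalSchedule.SECONDS,
)
PeriodicTask.objects.get_or_create(
    name='Payout bets every 10 seconds',
    task='UFCBetting.tasks.PayClosedLines',
    interval=schedule,
)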
I have a simple Celery task set up. To run it, I first fired off the redis-server, then activated the virtual env and entered "celery beat", opened a new terminal window channeling into the virtual env and entered "celery worker".
Flask==1.0.2
celery==4.2.1
requests==2.19
This is the error message afterwards:
consumer: Cannot connect to amqp://guest:**@127.0.0.1:5672//: timed out.
These are the configuration details shown after executing 'celery beat':
Configuration ->
. broker -> amqp://guest:**@localhost:5672//
. loader -> celery.loaders.default.Loader
. scheduler -> celery.beat.PersistentScheduler
. db -> celerybeat-schedule
. logfile -> [stderr]@%WARNING
. maxinterval -> 5.00 minutes (300s)
flask-proj/app/__init__.py
from flask import Flask, request, jsonify
from celery import Celery

import celeryconfig

app = Flask(__name__)
app.config.from_object('config')

def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task

    class ContextTask(TaskBase):
        abstract = True

        def __call__(self, *args, **kwargs):
            with app.app_context():
                return TaskBase.__call__(self, *args, **kwargs)

    celery.Task = ContextTask
    return celery

celery = make_celery(app)

@app.route("/")
def hello():
    return "Hello World!"
flask-proj/tasks/test.py
import celery

@celery.task()
def print_hello():
    logger = print_hello.get_logger()
    logger.info("Hello")
flask-proj/config.py
import os

REDIS_HOST = "127.0.0.1"
REDIS_PORT = 6379
BROKER_URL = os.environ.get(
    'REDIS_URL', "redis://{host}:{port}/0".format(
        host=REDIS_HOST, port=str(REDIS_PORT)))
CELERY_RESULT_BACKEND = BROKER_URL
flask-proj/celeryconfig.py
from celery.schedules import crontab

CELERY_IMPORTS = ('app.tasks.test',)
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}
Please let me know if I need to provide other details.
Had the same problem in Django, but my issue turned out to be using "BROKER_URL" instead of "CELERY_BROKER_URL" in settings.py. Celery wasn't finding the URL and was defaulting to the RabbitMQ port instead of the Redis port.
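A minimal sketch of that fix in settings.py, assuming Celery is configured with namespace='CELERY' as in the Django examples above:

# BROKER_URL = 'redis://localhost:6379/0'        # old name: not picked up
CELERY_BROKER_URL = 'redis://localhost:6379/0'   # namespaced name: picked up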
Remove the celery.conf.update(app.config) line from the make_celery() function, so it becomes:
def make_celery(app):
    # create context tasks in celery
    celery = Celery(
        app.import_name,
        broker=app.config['BROKER_URL']
    )
    celery.conf.update(app.config)  # remove this line.
    celery.config_from_object(celeryconfig)
    TaskBase = celery.Task
and copy the contents of flask-proj/config.py into flask-proj/celeryconfig.py, so that flask-proj/celeryconfig.py becomes:
from celery.schedules import crontab
import os

REDIS_HOST = "127.0.0.1"
REDIS_PORT = 6379
BROKER_URL = os.environ.get(
    'REDIS_URL', "redis://{host}:{port}/0".format(
        host=REDIS_HOST, port=str(REDIS_PORT)))
CELERY_RESULT_BACKEND = BROKER_URL

CELERY_IMPORTS = ('app.tasks.test',)
CELERY_TASK_RESULT_EXPIRES = 30
CELERY_TIMEZONE = 'UTC'

CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

CELERYBEAT_SCHEDULE = {
    'test-celery': {
        'task': 'app.tasks.test.print_hello',
        # Every minute
        'schedule': crontab(minute="*"),
    }
}
amqp is RabbitMQ, not Redis. A Redis URL is typically:
redis://:password@hostname:port/db_number
I would manually set the config to see if it works:
flask_app.config.update(
    CELERY_BROKER_URL='redis://localhost:6379',
    CELERY_RESULT_BACKEND='redis://localhost:6379'
)
I am trying to set up a task to run every ten seconds using Celery Beat.
I am using:
Django==1.11.3
celery==4.1.0
django-celery-beat==1.1.1
django-celery-results==1.0.1
It is giving me the following error:
Received unregistered task of type 'operations.tasks.message'
I am new to Celery; I have tried numerous solutions and cannot seem to find a solution. I would appreciate the help.
settings.py
CELERY_BROKER_URL = 'pyamqp://guest@localhost//'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Africa/Johannesburg'

CELERY_BEAT_SCHEDULE = {
    'message': {
        'task': 'operations.tasks.message',
        'schedule': 10.0
    }
}
celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nodiso.settings')

app = Celery('nodiso')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
__init__.py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ['celery_app']
tasks.py
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from operations import models
from .celery import periodic_task

@shared_task
def message():
    t = models.Celerytest.objects.create(Message='Hello World')
    t.save()
file structure:

proj/
    proj/
        __init__.py
        settings.py
        celery.py
    app/
        tasks.py
Within my celery.py file I define app like this:
app = Celery(
    'your_celery_app_name',
    include=[
        'your_celery_app_name.module.task1',
        'your_celery_app_name.module.task2',
    ]
)
app.config_from_object('your_celery_app_name.celeryconfig')
My celeryconfig.py is where I define my beat schedules and other settings (I think this would be the same as your settings.py).
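For illustration, a minimal celeryconfig.py sketch along those lines (the broker URL and schedule are placeholders, not from the original post):

from celery.schedules import crontab

broker_url = 'redis://localhost:6379/0'

beat_schedule = {
    'run-task1-every-minute': {
        'task': 'your_celery_app_name.module.task1',
        # placeholder cadence: every minute
        'schedule': crontab(minute='*'),
    },
}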
Below is probably not relevant (I'm not an expert with Python and how a package should be put together), but from my limited understanding your tasks should be a submodule of your celery app module. Take this with a pinch of salt, though.
My project structure looks more like this:

your_celery_app_name/ (dir)
    setup.py (file)
    your_celery_app_name/ (dir)
        __init__.py (file)
        celery.py (file)
        celeryconfig.py (file)
        module/ (dir)
            __init__.py (imports task1 and task2 from tasks; see the sketch below)
            tasks.py (implements task1 and task2)
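A one-line sketch of that module/__init__.py re-export, as described above:

# module/__init__.py
from .tasks import task1, task2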
I am trying to use celery in an app to run long tasks asynchronously.
In the top project folder I have application.py:
from flask_stormpath import StormpathManager
from app import create_app
from celery import Celery

app = create_app('DevelopmentConfig')
stormpath_manager = StormpathManager(app)
celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
celery.conf.update(app.config)

if __name__ == '__main__':
    app.run()
The config.py looks like this:
class Config:
    SECRET_KEY = 'something_very_secret'
    broker_url = 'sqs://'
    broker_transport_options = {'region': 'eu-west-1',
                                'visibility_timeout': 3600,
                                'polling_interval': 0.3,
                                'queue_name_prefix': 'celery-'}
    csrf = SECRET_KEY
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

    @staticmethod
    def init_app(app):
        pass

class DevelopmentConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'redis://localhost:6379/0'
    CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'

class ProductionConfig(Config):
    JUST_SOMETHING = 'a_little_trick'
    DEBUG = True
    STORMPATH_API_KEY_FILE = '/.stormpath/apiKey.properties'
    STORMPATH_APPLICATION = 'flask-test'
    CELERY_BROKER_URL = 'sqs://'

config = {'development': DevelopmentConfig,
          'default': DevelopmentConfig}
and in my views.py I try to run a task:
from flask import render_template, flash, request, jsonify, Response
from wtforms import Form, validators, SelectMultipleField, widgets
from flask_stormpath import login_required
from . import main
import numpy as np

class MultiCheckboxField(SelectMultipleField):
    widget = widgets.ListWidget(prefix_label=False)
    option_widget = widgets.CheckboxInput()

@celery.task(bind=True)
def do_something(test, training_size, testing_size):
Now, when I run it like this, I get the message that for @celery.task the name celery is not defined. Fair point, so I changed it to @main.celery.task. When I do this, I get the error message "AttributeError: 'Blueprint' object has no attribute 'celery'".
Then I tried to initiate Celery in the __init__.py file:
from flask import Flask
from celery import Celery

def create_app(config_name):
    app = Flask(__name__)
    configuration = "config." + config_name
    app.config.from_object(configuration)
    celery = Celery(app.name, broker=app.config.from_object('CELERY_BROKER_URL'))
    celery.conf.update(app.config)

    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    return app
When I do this, I get the error: ImportError: No module named CELERY_BROKER_URL
So I don't know where to import and initiate Celery, and how to create a blueprint so that I can use celery.task in views.py. Any help would be highly appreciated.
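For comparison, one commonly used sketch of this pattern (module and config names here are placeholders, not the original code): create the Celery instance at module level so views can import it, and configure it inside the factory. Note that config.from_object() expects an importable object, which is why passing the string 'CELERY_BROKER_URL' raises ImportError; the broker URL should be read from app.config by key instead.

from flask import Flask
from celery import Celery

# Module-level instance so blueprints/views can do `from app import celery`.
celery = Celery(__name__)

def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object('config.' + config_name)
    # Read the broker URL by key; from_object() is for importable objects.
    celery.conf.broker_url = app.config['CELERY_BROKER_URL']
    celery.conf.update(app.config)
    return app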
I'm making an app in Flask and I've incorporated Celery into it. However, I have to run the app via the terminal if I want the Celery worker to work as well (celery -A app.celery worker). I tried running it from the main run.py file as follows.
__init__.py
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.mail import Mail
from celery import Celery
from kombu import serialization
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
mail = Mail(app)
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'
app.config['CELERY_ACCEPT_CONTENT'] = ['json']
app.config['CELERY_TASK_SERIALIZER'] = 'json'
app.config['CELERY_RESULT_SERIALIZER'] = 'json'
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
serialization.registry._decoders.pop("application/x-python-serialize")
from app import views
and run.py
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker

app = current_app._get_current_object()
worker = worker.worker(app=app)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)
But this gives the error AttributeError: 'Celery' object has no attribute 'config'.
Any pointers as to what I'm doing wrong would be much appreciated.
Your run.py should be as follows. The original rebinds the name app from the Flask app to the Celery app, so app.config no longer holds the Flask settings; binding the Celery instance to application avoids the clash:
#!flask/bin/python
from __future__ import absolute_import, unicode_literals
from app import app
# app.run(debug=True, port=9001)
from celery import current_app
from celery.bin import worker

application = current_app._get_current_object()
worker = worker.worker(app=application)
options = {
    'broker': app.config['CELERY_BROKER_URL'],
    'loglevel': 'INFO',
    'traceback': True,
}
worker.run(**options)