django celery crontab periodic_task hourly not working - python

I have a periodic_task that works if I set the crontab to run every hour. But if I set it to something specific, like hour="13", minute="5" (run at 1:05 PM), it doesn't work. The problem might be in the timezone configuration. Am I doing something wrong?
settings.py
LANGUAGE_CODE = 'en-us'
# NOTE(review): crontab hour/minute fields are evaluated in Celery's timezone,
# not Django's — confirm CELERY_TIMEZONE matches this value.
TIME_ZONE = 'Asia/Manila'
USE_I18N = True
USE_L10N = True
# With USE_TZ=True Django stores datetimes in UTC and converts on display.
USE_TZ = True
celery_app.py
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'IrisOnline.settings')
app = Celery('IrisOnline', broker='redis://localhost:6379/0',include=[
"IrisOnline.tasks",
"order_management.tasks"
])
app.conf.update(
CELERY_TIMEZONE = 'Asia/Manila'
)
app.conf.update(
broker_url = 'redis://localhost:6379',
result_backend = 'redis://localhost:6379',
task_serializer='json',
accept_content=['json'],
result_serializer='json',
timezone='Asia/Manila',
)
app.conf.beat_schedule = {
'add-every-30-seconds': {
'task': 'IrisOnline.tasks.printthis',
'schedule':(crontab(hour=13,minute=33)),
},
}
# Using a string here means the worker don't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

Related

Celery not loading some configuration variable

I spent a lot of hours trying to understand why some Celery configuration variables (imports and beat_schedule) from my config file are not loaded.
I would like to centralize all my configuration variables in config.py, but BEAT_SCHEDULE, IMPORTS and some other config variables are not taken into account.
config.py
import os

from dotenv import load_dotenv

load_dotenv()

# Find the absolute file path to the top level project directory
basedir = os.path.abspath(os.path.dirname(__file__))


class Config:
    """
    Base configuration class. Contains default configuration settings + configuration settings applicable to all environments.
    """
    # Default settings
    FLASK_ENV = 'development'
    DEBUG = False
    TESTING = False
    # NOTE(review): Celery 4+/5 does not recognise these old-style CELERY_*
    # names when config_from_object(app.config) is called WITHOUT
    # namespace='CELERY'; only the new-style lowercase names (beat_schedule,
    # imports, broker_url, ...) are applied. Confirm against the Celery
    # version in use — this is the likely reason BEAT_SCHEDULE/IMPORTS are
    # being ignored.
    CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
    CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
    CELERY_ACCEPT_CONTENT = ['json', 'raw']
    CELERY_TASK_SERIALIZER = 'json'
    CELERY_RESULT_SERIALIZER = 'json'
    CELERY_IMPORTS = ['app.tasks']
    CELERY_BEAT_SCHEDULE = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10,3)
        }
    }
The make_celery function:
import os

from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask


def make_celery(app):
    """Bind the current Celery app to the Flask app and load its config.

    FIX: config_from_object() only applies new-style lowercase keys unless
    given a namespace. Passing namespace='CELERY' makes it translate the
    Flask-style CELERY_* keys (CELERY_BEAT_SCHEDULE -> beat_schedule,
    CELERY_IMPORTS -> imports, CELERY_BROKER_URL -> broker_url, ...), so the
    beat schedule and imports are actually loaded — no need to re-assign
    them by hand afterwards. The stray debug print(app.config) was removed.
    """
    celery = current_celery_app
    celery.config_from_object(app.config, namespace='CELERY')
    celery.flask_app = app        # make the Flask app reachable from tasks
    celery.Task = AppContextTask  # run every task inside an app context
    return celery
I create the Celery app and initialize it in init.py
from flask_celeryext import FlaskCeleryExt

# Extension instance created at module level; init_app() wires it to the app.
ext_celery = FlaskCeleryExt(create_celery_app=make_celery)


def create_app():
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)
    # Configure the flask app instance
    CONFIG_TYPE = os.getenv('CONFIG_TYPE', default='config.Config')
    app.config.from_object(CONFIG_TYPE)
    ext_celery.init_app(app)
    # Debug dump of the Celery config actually in effect (defaults hidden).
    print(ext_celery.celery.conf.humanize(with_defaults=False, censored=True))
    # NOTE(review): snippet appears truncated — an app factory normally
    # returns `app` here; confirm against the full source.
If I check the console, I can't see any CELERY_BEAT_SCHEDULE option set.
I tried different config variable names (CELERYBEAT_SCHEDULE, BEAT_SCHEDULE, beat_schedule); nothing works.
But if I modify my make_celery function:
import os

from celery import current_app as current_celery_app
from flask_celeryext import AppContextTask


def make_celery(app):
    """Configure the current Celery app from the Flask config, then set the
    beat schedule and task imports explicitly (the keys config_from_object
    does not pick up on its own)."""
    celery = current_celery_app
    print(app.config)
    celery.config_from_object(app.config)

    schedule = {
        'run-me-every-ten-seconds': {
            'task': 'app.tasks.add',
            'schedule': 10.0,
            'args': (10, 3),
        }
    }
    celery.conf.imports = ['app.tasks']
    celery.conf.beat_schedule = schedule

    celery.flask_app = app
    celery.Task = AppContextTask
    return celery
It is working as expected
Why are BEAT_SCHEDULE and IMPORTS not loaded from my config.py?
Thanks

Celery registers task but beat is not scheduling tasks from installed apps

I'm having issues getting Celery/Celery Beat to schedule any tasks other than the ones in my celery.py file. I can see the tasks register with "celery inspect registered" but the tasks do not run on the schedule. I've read all of the docs and I'm a hitting a wall. Running Redis on Windows with Ubuntu WSL.
Test- runs fine every 10 seconds and shows up in my shell
PayClosedLines - is registered but doesn't come up in my shell when I run my celery worker.
/proj/proj/celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import apps
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')
app = Celery('mysite')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])
#app.task(bind=True)
def debug_task(self):
print('Request: [0!r}'.format(self.request))
#app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
sender.add_periodic_task(10.0, test.s('test'), name='add every 10')
sender.add_periodic_task(10.0, )
sender.add_periodic_task(30.0, test.s(), expires=10)
#app.task
def test(arg):
print(arg)
'''
proj/proj/settings.py
# Celery settings (old-style names, read via config_from_object without a
# namespace in proj/proj/celery.py).
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'

from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'Payout bets every 10 seconds': {
        'task': 'UFCBetting.tasks.PayClosedLines',
        'schedule': timedelta(seconds=10.0),
    },
}
CELERY_IMPORTS = ('UFCBetting.tasks',)

# FIX (the accepted answer): with django-celery-beat installed, beat must use
# the database scheduler or the schedule above is never dispatched. Setting
# it here avoids passing --scheduler on every `celery beat` invocation.
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
proj/app/task.py
from __future__ import absolute_import, unicode_literals

from celery import task, shared_task

from .models import BettingLines, Bets, CustomUser
from .MyBookieTools import get_odds


@task(name='UpdateLinesTable')  # FIX: decorator '@' was garbled to '#'
def UpdateLinesTable():
    """Fetch current odds and persist one BettingLines row per fighter."""
    odds = get_odds()
    for odd in odds:
        bl = BettingLines()
        bl.fighter = odd[0]
        bl.line = odd[1]
        bl.save()


@shared_task  # FIX: decorator '@' was garbled to '#'
def PayClosedLines():
    """Credit every bet on a won, not-yet-paid line, then mark the line paid."""
    unpaid_lines = BettingLines.objects.filter(result="W").exclude(payment_status=True)
    print(unpaid_lines)
    for line in unpaid_lines:
        print(line)
        if line.result == "W":
            unpaid_bets = Bets.objects.filter(line_id=line.id)
            print(unpaid_bets)
            for bet in unpaid_bets:
                user = CustomUser.objects.get(id=bet.placed_by_id)
                user.balance = user.balance + line.payout(amount=bet.bet_amount)
                user.save()
            line.payment_status = 1
            line.save()
    print("Closed lines paid.")
Your settings do not have the CELERY_BEAT_SCHEDULER constant.
If you add this to settings, you don't need to use the --scheduler option
CELERY_BEAT_SCHEDULER = `django_celery_beat.schedulers:DatabaseScheduler`
I had django-celery-beat installed so I needed to start my beat worker like this.
celery -A mysite beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
instead of the usual
celery -A mysite beat

Celery 4.1 periodic tasks error

I am trying to set up a task to run every ten seconds, using Celery Beat.
I am using:
Django==1.11.3
celery==4.1.0
django-celery-beat==1.1.1
django-celery-results==1.0.1
It is giving me the following error:
Received unregistered task of type 'operations.tasks.message'
I am new to Celery; I have tried numerous solutions and cannot seem to find one that works. I would appreciate the help.
settings.py
# FIX: the broker URL was pasted as 'pyamqp://guest#localhost//' — the same
# markdown garbling that turned '@' decorators into '#'. The default RabbitMQ
# credentials form is guest@localhost; with '#' the host part is lost to a
# URL fragment.
CELERY_BROKER_URL = 'pyamqp://guest@localhost//'
CELERY_RESULT_BACKEND = 'django-db'  # django-celery-results backend
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Africa/Johannesburg'
CELERY_BEAT_SCHEDULE = {
    'message': {
        # NOTE(review): this name requires the task to live in
        # operations/tasks.py (the question's file is task.py) — see answer.
        'task': 'operations.tasks.message',
        'schedule': 10.0
    }
}
celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nodiso.settings')
app = Celery('nodiso')
# Using a string here means the worker don't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
#app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
__init__.py
from __future__ import absolute_import, unicode_literals

# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app

# Explicit public API of the package.
__all__ = ['celery_app']
task.py
from __future__ import absolute_import, unicode_literals

from celery import shared_task

from operations import models

# FIX: removed `from .celery import periodic_task` — celery.py defines no
# `periodic_task`, so that import raises ImportError, the module never loads,
# and every task in it stays unregistered ("Received unregistered task").
# NOTE(review): the beat schedule names 'operations.tasks.message', so this
# file must be operations/tasks.py (plural) for autodiscovery to find it.


@shared_task  # FIX: bare '#task' was a garbled decorator; use shared_task
def message():
    t = models.Celerytest.objects.create(Message='Hello World')
    t.save()
files structure
proj-
proj-
__init__.py
settings.py-
celery.py-
app-
tasks.py-
Within my celery.py file I define app like this:
app = Celery(
    'your_celery_app_name',
    # Modules imported when the worker starts, so their tasks register.
    include=[
        'your_celery_app_name.module.task1',
        'your_celery_app_name.module.task2',
    ]
)
# Load the remaining settings (beat schedule, serializers, ...) from a
# dedicated celeryconfig module instead of framework settings.
app.config_from_object('your_celery_app_name.celeryconfig')
My celeryconfig.py is where I define my beats and other settings (I think this would be same as your settings.py).
Below is probably not relevant - I'm not an expert with Python and how package should be put together - but from my limited understanding your tasks should be a submodule of your celery app module. Take this with pinch of salt though.
My project structure looks more like this:
your_celery_app_name (dir)
setup.py (file)
your_celery_app_name (dir)
__init__.py (file)
celery.py (file)
celeryconfig.py (file)
module (dir)
__init__.py (importing task1 and task2 from tasks)
tasks.py (implementing task1 and task2)

Tasks in CELERYBEAT_SCHEDULE not being processed

I am trying to setup a dummy task in Celery that runs every 3 seconds but have had little success so far. This is the output I am getting:
I've set up celery as follows:
In settings.py:
from datetime import timedelta

# Celery settings (old-style names, read without a namespace in celery.py).
BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'
# FIX: ("api.tasks") is just the string "api.tasks" — the trailing comma is
# required to make it a one-element tuple.
CELERY_IMPORTS = ("api.tasks",)
CELERYBEAT_SCHEDULE = {
    'add_job': {
        'task': 'add_job',
        'schedule': timedelta(seconds=3),
        'args': (16, 16)
    },
}
# FIX: removed the duplicate CELERY_TIMEZONE = 'UTC' assignment that
# redundantly followed the schedule.
In celery.py:
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blogpodapi.settings')
app = Celery(
'blogpodapi',
)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
#app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
In tasks.py
from celery.task import task


@task(name='add_job')  # FIX: decorator '@' was garbled to '#'
def add_job(x, y):
    """Add two numbers, logging the inputs and the result.

    FIX: the Python-2 `print "..."` statements were a syntax error under
    Python 3; converted to print() calls with the same output.
    """
    r = x + y
    print("task arguments: {x}, {y}".format(x=x, y=y))
    print("task result: {r}".format(r=r))
    return r
Have I done anything wrong in the way that I have set it up?
Only answering because this was the first result when I searched on CELERYBEAT_SCHEDULE.
Reason it didn't work for me, was because it should be CELERY_BEAT_SCHEDULE
Okay the very basic mistake I see is that most of the settings you've mentioned in your settings.py need to go in celery.py
Specially the CELERYBEAT_SCHEDULE
You are doing everything right; it's just that your Celery is waiting for a task which it never receives, since it reads from celery.py and not from settings.py. Hence nothing is happening.
See my celery.py and also the settings.py for a reference.
celery.py -> https://github.com/amyth/hammer/blob/master/config/celery.py
settings.py -> https://github.com/amyth/hammer/blob/master/config/settings.py
I have used crontab because I wanted to execute the task at a particular time of day, so you don't need to worry about that — yours is perfect for what you want to do.
Also, whatever blog or tutorial you are following for Celery, check again what exactly those settings are required for and whether you need all of them.
As to why your task does not run: it is not registered. If it was, the output of the Celery worker when it starts would be different - it would contain the following two lines (at least):
[tasks]
. add_job

Djcelery : How to save celery task status into database

My celery task runs well
But I found my djcelery_taskstate table has nothing :
How can it save data into it??
Here is my setting :
settings.py
BROKER_URL = 'redis://localhost:6379'
# Beat reads periodic-task schedules from djcelery's database tables.
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
# Task states/results are written to djcelery tables (djcelery_taskstate).
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_TIMEZONE = 'Asia/Taipei'
celery.py
from __future__ import absolute_import

import os

from celery import Celery

# Must run before django.conf.settings is touched.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')

from django.conf import settings  # noqa

app = Celery('myproject')
# Pull configuration from Django settings (old-style keys, no namespace).
app.config_from_object('django.conf:settings')
# Register tasks from every installed app's tasks module.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
# conf.update() AFTER config_from_object overrides individual keys (the
# reverse order would be reset by config_from_object).
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
    CELERY_TIMEZONE = 'Asia/Taipei'
)
tasks.py
from __future__ import absolute_import
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery import task
from datetime import timedelta
from myapp.deal import check_data
#periodic_task(run_every=timedelta(seconds=30),name="collect-rawdata",ignore_result=True)
def rawdata():
return check_data()
Here is my command to run celery :
one terminal run:
celery -A wiarea worker -l info
another run:
celery -A wiarea beat -l info
You have set ignore_result to True. This makes Celery drop the result and not save it anywhere.

Categories