I am attempting to use Celery within my Django app to speed up the processing time of a function, and I can't get it to work correctly. I am using RabbitMQ.
My tasks.py
from celery import Celery
from celery import shared_task, current_task
from myapp.celery import app

@app.task
def add(x, y):
    for i in range(25000000):
        a = x + y
    return x + y
My Python code
import time

def test_multi_func():
    x = 5
    y = 10
    i = 15
    print(f"start = {time.perf_counter()}")
    while i > 0:
        g = add.delay(x, y)
        result = add.AsyncResult(g.id)  # AsyncResult expects the task id; g is itself an AsyncResult
        i -= 1
    print(result.backend)
    print(result.status)
    print(f"end = {time.perf_counter()}")
    print(f"g = {g.get()}")
    print(f"x = {x} ; y = {y}")
My settings.py
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_select2',
    'chartjs',
    'django_material_icons',
    'material',
    'django_celery_results',
    'celery_progress',
    'django_apscheduler'
]
BROKER_URL = 'django://'
result_backend = 'django-db'
CELERY_RESULT_BACKEND = 'django-db'
result_persistent = True
task_result_expires = None
send_events = True
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_BROKER_URL = 'amqp://localhost'
CELERY_CACHE_BACKEND = 'django-cache'
CELERY_IGNORE_RESULT = False
My celery.py
import os
from celery import Celery

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myapp.settings')

app = Celery('myapp',
             backend='amqp://guest@localhost:15672/',
             broker='amqp://guest@localhost:15672/',
             include=['myapp.tasks'])

app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print(f'Request: {self.request!r}')
What am I doing wrong? I have a difficult time understanding this. Calling test_multi_func() queues the tasks, but then they are stuck as PENDING in RabbitMQ and nothing happens. I would really appreciate it if someone could help me understand what I need to do differently. I've tried many different iterations of code I found online and nothing seems to work.
Your Celery configuration has many conflicts, and I'm wondering how you're running the worker. Specifically:
In your settings.py, you're mixing the legacy and new styles of configuring Celery. The lowercase configuration settings are preferred for Celery 4 and beyond: https://docs.celeryq.dev/en/stable/userguide/configuration.html#new-lowercase-settings
You've configured the broker URL twice (amqp://guest@localhost:15672/ and amqp://localhost), and you've configured two different result backends. Note also that 15672 is RabbitMQ's management UI port; the AMQP broker listens on 5672 by default.
You've passed the configuration into the Celery instance three times, and the settings are stomping on each other. When you pass your settings directly into the Celery class, it should look like the following:
app = Celery('myapp',
             backend='amqp://guest@localhost:5672/',
             broker='amqp://guest@localhost:5672/',
             include=['myapp.tasks'])
You can also create the Celery instance and configure it in two steps if you prefer:
app = Celery('myapp')
app.conf.update(broker_url='amqp://guest@localhost:5672/',
                result_backend='amqp://guest@localhost:5672/')
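A third option, and the one that matches the app.config_from_object('django.conf:settings', namespace='CELERY') line you already have, is to configure everything exactly once in settings.py and pass nothing to the Celery() constructor. A minimal sketch, assuming the default AMQP port and the django_celery_results backend you already have installed:
# settings.py - single source of truth for Celery (a sketch)
CELERY_BROKER_URL = 'amqp://guest@localhost:5672/'  # AMQP port, not the 15672 management UI port
CELERY_RESULT_BACKEND = 'django-db'                 # provided by django_celery_results
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

# celery.py - no broker/backend kwargs, so nothing gets stomped
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myapp.settings')

app = Celery('myapp')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
With namespace='CELERY', every CELERY_-prefixed name in settings.py maps onto the matching lowercase Celery setting, so old-style lines such as BROKER_URL = 'django://' can be deleted outright.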
Related
I'm having issues getting Celery/Celery Beat to schedule any tasks other than the ones in my celery.py file. I can see the tasks register with "celery inspect registered", but the tasks do not run on the schedule. I've read all of the docs and I'm hitting a wall. I'm running Redis on Windows with Ubuntu WSL.
Test - runs fine every 10 seconds and shows up in my shell.
PayClosedLines - is registered but doesn't come up in my shell when I run my Celery worker.
/proj/proj/celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import apps

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')

app = Celery('mysite')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(10.0, test.s('test'), name='add every 10')
    sender.add_periodic_task(30.0, test.s(), expires=10)

@app.task
def test(arg):
    print(arg)
proj/proj/settings.py
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'
from datetime import timedelta
CELERY_BEAT_SCHEDULE = {
    'Payout bets every 10 seconds': {
        'task': 'UFCBetting.tasks.PayClosedLines',
        'schedule': timedelta(seconds=10.0),
    },
}
CELERY_IMPORTS = ('UFCBetting.tasks',)
proj/app/task.py
from __future__ import absolute_import, unicode_literals
from .MyBookieTools import get_odds
from .models import BettingLines, Bets, CustomUser
from celery import task, shared_task

@task(name='UpdateLinesTable')
def UpdateLinesTable():
    odds = get_odds()
    for odd in odds:
        bl = BettingLines()
        bl.fighter = odd[0]
        bl.line = odd[1]
        bl.save()

@shared_task
def PayClosedLines():
    unpaid_lines = BettingLines.objects.filter(result="W").exclude(payment_status=True)
    print(unpaid_lines)
    for line in unpaid_lines:
        print(line)
        if line.result == "W":
            unpaid_bets = Bets.objects.filter(line_id=line.id)
            print(unpaid_bets)
            for bet in unpaid_bets:
                user = CustomUser.objects.get(id=bet.placed_by_id)
                user.balance = user.balance + line.payout(amount=bet.bet_amount)
                user.save()
            line.payment_status = 1
            line.save()
    print("Closed lines paid.")
Your settings do not have the CELERY_BEAT_SCHEDULER constant. If you add this to settings, you don't need to use the --scheduler option:
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
I had django-celery-beat installed, so I needed to start my beat worker like this:
celery -A mysite beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
instead of the usual
celery -A mysite beat
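Putting the two answers together: a minimal sketch, assuming django_celery_beat is installed, added to INSTALLED_APPS, and python manage.py migrate has been run:
# settings.py (a sketch)
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'

# With the setting in place, the plain command picks up the database
# scheduler automatically, without the --scheduler flag:
#   celery -A mysite beat -l info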
I have a periodic task that works if I set crontab to run every hour. But if I set it to something specific like hour=13, minute=5 (run at 1:05 PM), it doesn't work. The problem might be in the timezone configuration. Am I doing something wrong?
settings.py
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Manila'
USE_I18N = True
USE_L10N = True
USE_TZ = True
celery_app.py
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'IrisOnline.settings')

app = Celery('IrisOnline', broker='redis://localhost:6379/0', include=[
    "IrisOnline.tasks",
    "order_management.tasks"
])

app.conf.update(
    CELERY_TIMEZONE='Asia/Manila'
)

app.conf.update(
    broker_url='redis://localhost:6379',
    result_backend='redis://localhost:6379',
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='Asia/Manila',
)

app.conf.beat_schedule = {
    'add-every-30-seconds': {
        'task': 'IrisOnline.tasks.printthis',
        'schedule': crontab(hour=13, minute=33),
    },
}

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
I'm using Django 1.10 and Celery 4.
I found gaps in Celery's documentation :(
The worker configuration is fine and the worker runs well (I can see it connected in the RabbitMQ webmin), but my tasks can't connect to RabbitMQ to publish their messages.
settings.py
CELERY_BROKER_URL = 'amqp://dev:dev@localhost/dev_virtualhost'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
celery.py
from __future__ import absolute_import
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

app = Celery('dev_app')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
tasks.py
from celery import shared_task

@shared_task(queue='data_to_write')
def test_task(data):
    open('/tmp/test', 'w').write(data)
From Django's shell I run this code, and it fails :(
test_task.delay('hello world')
AccessRefused: (0, 0): (403) ACCESS_REFUSED - Login was refused using authentication mechanism AMQPLAIN. For details see the broker logfile.
In the RabbitMQ logs I see that the credentials are guest:guest, not dev:dev as I wrote in settings.py.
Where is my error? Thanks
The solution is to rename celery.py to celery_app.py to avoid the automatic import.
Then you must run Celery this way:
celery --app=PACKAGE.celery_app:app worker
PACKAGE is the package (folder) where you put celery_app.py.
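To confirm the rename actually fixed the configuration loading, a quick check from the Django shell; a sketch, where PACKAGE again stands for your project package:
# Run inside `python manage.py shell`
from PACKAGE.celery_app import app

# If the settings were picked up, this prints the dev:dev URL instead of the
# default guest@localhost broker that caused ACCESS_REFUSED
print(app.conf.broker_url)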
I am trying to set up a dummy task in Celery that runs every 3 seconds, but I have had little success so far.
I've set up Celery as follows:
In settings.py:
from datetime import timedelta
BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'
CELERY_IMPORTS = ("api.tasks")
CELERYBEAT_SCHEDULE = {
    'add_job': {
        'task': 'add_job',
        'schedule': timedelta(seconds=3),
        'args': (16, 16)
    },
}
CELERY_TIMEZONE = 'UTC'
In celery.py:
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'blogpodapi.settings')

app = Celery(
    'blogpodapi',
)

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
In tasks.py
from celery.task import task

@task(name='add_job')
def add_job(x, y):
    r = x + y
    print("task arguments: {x}, {y}".format(x=x, y=y))
    print("task result: {r}".format(r=r))
    return r
Have I done anything wrong in the way that I have set it up?
Only answering because this was the first result when I searched on CELERYBEAT_SCHEDULE.
The reason it didn't work for me was that it should be CELERY_BEAT_SCHEDULE.
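Which spelling applies depends on how the app is configured: with app.config_from_object('django.conf:settings', namespace='CELERY') the key must be CELERY_BEAT_SCHEDULE, while the old non-namespaced style (as in this question's celery.py) expects CELERYBEAT_SCHEDULE. A minimal sketch of the namespaced form:
# settings.py - paired with
# app.config_from_object('django.conf:settings', namespace='CELERY')
from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'add_job': {
        'task': 'add_job',
        'schedule': timedelta(seconds=3),
        'args': (16, 16),
    },
}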
Okay, the very basic mistake I see is that most of the settings you've mentioned in your settings.py need to go in celery.py, especially the CELERYBEAT_SCHEDULE.
You are doing everything right; it's just that your Celery worker is waiting for a task it never receives, because it reads from celery.py and not from settings.py. Hence nothing is happening.
See my celery.py and settings.py for reference:
celery.py -> https://github.com/amyth/hammer/blob/master/config/celery.py
settings.py -> https://github.com/amyth/hammer/blob/master/config/settings.py
I have used crontab because I wanted to execute the task at a particular time of the day, so you don't need to worry about that; yours is fine for what you want to do.
Also, whatever blog or tutorial you are following, check again what exactly each of those settings is for and whether you need all of them.
As to why your task does not run: it is not registered. If it were, the output of the Celery worker when it starts would contain (at least) the following two lines:
[tasks]
. add_job
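A quick way to check registration without restarting the worker is to inspect the app's task registry from a shell; a sketch, assuming the project layout from this question:
# Run inside `python manage.py shell`
from blogpodapi.celery import app

# Every task the app knows about, keyed by registered name;
# 'add_job' should appear here once api.tasks is actually imported
print(sorted(app.tasks.keys()))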
I am trying to run a basic example from the Celery docs, but when I run 'from tasks import add' it gives me an error saying the module is not found.
These are the files I have changed.
proj/proj/celery.py
from __future__ import absolute_import
import os
from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')
from django.conf import settings

app = Celery('proj')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
proj/proj/settings.py
BROKER_URL = 'amqp://guest:guest@localhost//'

#: Only add pickle to this list if your broker is secured
#: from unwanted access (see userguide/security.html)
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'celerymod',
    'djcelery',
)
proj/proj/__init__.py
from __future__ import absolute_import
from .celery import app as celery_app
proj/celerymod/tasks.py
from __future__ import absolute_import
from celery import shared_task

@shared_task
def add(x, y):
    return x + y

@shared_task
def mul(x, y):
    return x * y

@shared_task
def xsum(numbers):
    return sum(numbers)
Appreciate any suggestions. Thank you!
I had the same problem; my tasks in the apps were not registered.
I fixed this by moving the import of celery.py into settings.py instead of __init__.py.
proj/proj/__init__.py
from __future__ import absolute_import
# is empty
proj/proj/settings.py
from .celery import app as celery_app
BROKER_URL = 'amqp://guest:guest@localhost//'

#: Only add pickle to this list if your broker is secured
#: from unwanted access (see userguide/security.html)
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'celerymod',
    'djcelery',
)
If anyone else has the same issue, do this in the shell (myapp is the name of the application; in my case it is proj):
from myapp.tasks import add
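Once the import works, a quick end-to-end smoke test from the same shell; a sketch, assuming a worker is running and a result backend is configured:
# Run inside `python manage.py shell`
from celerymod.tasks import add

result = add.delay(4, 4)       # queues the task and returns an AsyncResult
print(result.get(timeout=10))  # -> 8 (requires a result backend; .get() blocks)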