djcelery: How to save Celery task status into the database - python

My Celery task runs fine, but my djcelery_taskstate table stays empty.
How can I get Celery to save data into it?
Here are my settings:
settings.py
BROKER_URL = 'redis://localhost:6379'
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_RESULT_BACKEND = 'djcelery.backends.database:DatabaseBackend'
CELERY_TIMEZONE = 'Asia/Taipei'
celery.py
from __future__ import absolute_import
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')

from django.conf import settings  # noqa

app = Celery('myproject')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
    CELERY_TIMEZONE='Asia/Taipei',
)
tasks.py
from __future__ import absolute_import
from celery.task.schedules import crontab
from celery.decorators import periodic_task
from celery import task
from datetime import timedelta
from myapp.deal import check_data

@periodic_task(run_every=timedelta(seconds=30), name="collect-rawdata", ignore_result=True)
def rawdata():
    return check_data()
Here are the commands I use to run Celery. In one terminal:
celery -A wiarea worker -l info
and in another:
celery -A wiarea beat -l info

You have set ignore_result to True in the periodic_task decorator, which makes Celery drop the result instead of saving it anywhere.
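If you want the state stored, drop ignore_result (or set it to False); a minimal sketch based on the tasks.py above:

@periodic_task(run_every=timedelta(seconds=30), name="collect-rawdata", ignore_result=False)
def rawdata():
    # the return value is now written to the configured result backend
    return check_data()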

Related

Celery registers task but beat is not scheduling tasks from installed apps

I'm having issues getting Celery/Celery Beat to schedule any tasks other than the ones in my celery.py file. I can see the tasks register with "celery inspect registered", but they do not run on the schedule. I've read all of the docs and I'm hitting a wall. I'm running Redis on Windows with Ubuntu WSL.
Test - runs fine every 10 seconds and shows up in my shell.
PayClosedLines - is registered but doesn't come up in my shell when I run my celery worker.
/proj/proj/celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.apps import apps

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings')

app = Celery('mysite')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))

@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    sender.add_periodic_task(10.0, test.s('test'), name='add every 10')
    sender.add_periodic_task(10.0, )
    sender.add_periodic_task(30.0, test.s(), expires=10)

@app.task
def test(arg):
    print(arg)
proj/proj/settings.py
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'UTC'

from datetime import timedelta

CELERY_BEAT_SCHEDULE = {
    'Payout bets every 10 seconds': {
        'task': 'UFCBetting.tasks.PayClosedLines',
        'schedule': timedelta(seconds=10.0),
    },
}
CELERY_IMPORTS = ('UFCBetting.tasks',)
proj/app/task.py
from __future__ import absolute_import, unicode_literals
from .MyBookieTools import get_odds
from .models import BettingLines, Bets, CustomUser
from celery import task, shared_task

@task(name='UpdateLinesTable')
def UpdateLinesTable():
    odds = get_odds()
    for odd in odds:
        bl = BettingLines()
        bl.fighter = odd[0]
        bl.line = odd[1]
        bl.save()

@shared_task
def PayClosedLines():
    unpaid_lines = BettingLines.objects.filter(result="W").exclude(payment_status=True)
    print(unpaid_lines)
    for line in unpaid_lines:
        print(line)
        if line.result == "W":
            unpaid_bets = Bets.objects.filter(line_id=line.id)
            print(unpaid_bets)
            for bet in unpaid_bets:
                user = CustomUser.objects.get(id=bet.placed_by_id)
                user.balance = user.balance + line.payout(amount=bet.bet_amount)
                user.save()
            line.payment_status = 1
            line.save()
    print("Closed lines paid.")
Your settings do not have the CELERY_BEAT_SCHEDULER constant.
If you add this to your settings, you don't need to use the --scheduler option:
CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler'
I had django-celery-beat installed, so I needed to start my beat worker like this:
celery -A mysite beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler
instead of the usual
celery -A mysite beat
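Note that django_celery_beat also has to be listed in INSTALLED_APPS and its migrations applied before the database scheduler has tables to read; a minimal sketch:

# settings.py - register the beat app
# (then run its migrations once: python manage.py migrate django_celery_beat)
INSTALLED_APPS = [
    # ... your other apps ...
    'django_celery_beat',
]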

Celery 4.1 periodic tasks error

I am trying to set up a task to run every ten seconds using Celery Beat.
I am using:
Django==1.11.3
celery==4.1.0
django-celery-beat==1.1.1
django-celery-results==1.0.1
It is giving me the following error:
Received unregistered task of type 'operations.tasks.message'
I am new to Celery; I have tried numerous solutions and cannot seem to find one that works. I would appreciate the help.
settings.py
CELERY_BROKER_URL = 'pyamqp://guest@localhost//'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Africa/Johannesburg'

CELERY_BEAT_SCHEDULE = {
    'message': {
        'task': 'operations.tasks.message',
        'schedule': 10.0
    }
}
celery.py
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nodiso.settings')

app = Celery('nodiso')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
__init__.py
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ['celery_app']
task.py
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from operations import models
from .celery import periodic_task

@task
def message():
    t = models.Celerytest.objects.create(Message='Hello World')
    t.save()
file structure:
proj/
    proj/
        __init__.py
        settings.py
        celery.py
    app/
        tasks.py
Within my celery.py file I define app like this:
app = Celery(
    'your_celery_app_name',
    include=[
        'your_celery_app_name.module.task1',
        'your_celery_app_name.module.task2',
    ]
)
app.config_from_object('your_celery_app_name.celeryconfig')
My celeryconfig.py is where I define my beat schedule and other settings (I think this plays the same role as your settings.py).
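For illustration, a minimal celeryconfig.py sketch; the broker URL and schedule values here are placeholders, not taken from the question:

# celeryconfig.py (sketch) - the standalone equivalent of Django's settings.py
broker_url = 'redis://localhost:6379/0'  # assumed broker, adjust to yours
beat_schedule = {
    'task1-every-10s': {
        'task': 'your_celery_app_name.module.task1',
        'schedule': 10.0,  # seconds
    },
}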
Below is probably not relevant - I'm not an expert on Python and how packages should be put together - but from my limited understanding your tasks should be a submodule of your celery app module. Take this with a pinch of salt, though.
My project structure looks more like this:
your_celery_app_name (dir)
    setup.py (file)
    your_celery_app_name (dir)
        __init__.py (file)
        celery.py (file)
        celeryconfig.py (file)
        module (dir)
            __init__.py (importing task1 and task2 from tasks)
            tasks.py (implementing task1 and task2)
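The inner __init__.py would then just re-export the tasks, roughly like this (a sketch, assuming task1 and task2 are defined in tasks.py):

# module/__init__.py (sketch)
from .tasks import task1, task2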

Django + Celery 4 wrong settings

I'm using Django 1.10 and Celery 4.
I found gaps in Celery's documentation :(
The worker configuration works fine (I can see the worker connected in the RabbitMQ webmin), but my tasks can't connect to RabbitMQ to publish their messages.
settings.py
CELERY_BROKER_URL = 'amqp://dev:dev@localhost/dev_virtualhost'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
celery.py
from __future__ import absolute_import
import os
from celery import Celery

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')

app = Celery('dev_app')
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()

@app.task(bind=True)
def debug_task(self):
    print('Request: {0!r}'.format(self.request))
tasks.py
from celery import shared_task

@shared_task(queue='data_to_write')
def test_task(data):
    open('/tmp/test', 'w').write(data)
From Django's shell I run this code and it fails :(
test_task.delay('hello world')
AccessRefused: (0, 0): (403) ACCESS_REFUSED - Login was refused using authentication mechanism AMQPLAIN. For details see the broker logfile.
In the RabbitMQ logs I see that the credentials are guest:guest, not the dev:dev I wrote in settings.py.
Where is my error? Thanks
The solution is to rename celery.py to celery_app.py to avoid the auto-import.
Then you must execute Celery this way:
celery --app=PACKAGE.celery_app:app worker
where PACKAGE is the module (folder) where you put celery_app.py.
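If your package __init__.py imports the Celery app (as in the Django integration guide), remember to point it at the new module name after the rename; a sketch:

# PACKAGE/__init__.py (sketch) - updated import after the rename
from .celery_app import app as celery_app
__all__ = ['celery_app']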

DJANGO celery task is executed from shell but it's not executed from view

I am trying to create some asynchronous tasks with Celery in my Django application.
settings.py
BROKER_URL = 'django://localhost:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
celery.py:
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'provcon.settings')
app = Celery('provcon')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
project __init__.py:
from __future__ import absolute_import
from .celery import app as celery_app
tasks.py:
from __future__ import absolute_import
from celery import shared_task
from celery import task
from .models import Proc_Carga

@task()
def carga_ftp():
    tabla = Proc_Carga()
    sp = tabla.carga()
    return None
I call the asynchronous task from my view like this:
from .tasks import carga_ftp

@login_required(login_url='/login/')
def archivoview(request):
    usuario = request.user
    if request.method == 'POST':
        form = ProcFTPForm(usuario, request.POST)
        if form.is_valid():
            form.save()
            proc = Lista_Final()
            lista = proc.archivos()
            # call asynchronous task
            carga_ftp.delay()
            return HttpResponseRedirect('/resumen/')
    else:
        form = ProcFTPForm(usuario)
    return render_to_response('archivo.html', {'form': form}, context_instance=RequestContext(request))
When I run the task from the python manage.py shell, the worker executes it and creates the database objects without any problem, but when I trigger the task from the view it is not executed.
Any idea why the task runs from the manage.py shell but not from the view?
Thanks in advance
Same problem here: the task worked in the Python shell but not from Django views.
I followed http://docs.celeryproject.org/en/latest/django/first-steps-with-django.html
I was missing the namespace in app.config_from_object; after adding namespace='CELERY' it works fine:
app.config_from_object('django.conf:settings', namespace='CELERY')
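Keep in mind that with namespace='CELERY', Celery only reads settings that carry the CELERY_ prefix, so for example BROKER_URL must become CELERY_BROKER_URL:

# settings.py - prefixed names are required once the CELERY namespace is set
# (the redis URL here is just an illustrative placeholder)
CELERY_BROKER_URL = 'redis://localhost:6379/0'  # was BROKER_URL
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'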
Check that Redis is running:
$ redis-cli ping
Check that the celery worker is running in the Django admin interface. If it is not running, execute this command:
celery -A provcon worker -l info
Then test your task from your Django application. If it works, run the celery worker in the background as a daemon.
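One way to daemonize it is celery multi, which starts the worker detached; a sketch, assuming the same provcon app:

celery multi start worker1 -A provcon -l info   # start a detached worker
celery multi stop worker1                       # stop it later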

Save the Celery task in the DB - Django

I'm referring to the Django Celery documents.
I created celery.py in my proj/proj just as the document says, and then included __init__.py.
celery.py
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj.settings')

app = Celery('proj')
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
)
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
__init__.py
from __future__ import absolute_import
from .celery import app as celery_app
I installed django-celery (pip install django-celery), then ran the migrations (python manage.py migrate djcelery).
That created some of the tables in my DB.
tasks.py
from __future__ import absolute_import
from celery import shared_task
import requests
import json

@shared_task
def post_notification(data, url):
    headers = {'content-type': 'application/json'}
    requests.post(url, data=json.dumps(data), headers=headers)
After that I called my task in my views as:
task = post_notification.delay(data, url)
print task.id      # prints an id
print task.status  # prints PENDING
But nothing gets logged into any of my tables.
I've read threads on SO (Thread1, Thread2) and many more linked from them, but nothing helps.
It gives me the ID and status of the task, but how do I save the task in the DB? Usually it should get logged into celery_taskmeta, but there's nothing in there.
The task does get executed, but I want to save it in the DB as well. How can I do that? Is there something I'm missing?
Try this in celery.py:
from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.schedules import crontab

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'app_name.dev_settings')

app = Celery('app_name')

# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
app.conf.CELERY_TIMEZONE = 'UTC'
app.conf.update(
    CELERY_RESULT_BACKEND='djcelery.backends.database:DatabaseBackend',
)
Add the following to your settings.py file:
BROKER_URL = 'amqp://guest:guest@localhost//'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
And start the worker.
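For example, with the app_name project from the snippet above:

celery -A app_name worker -l info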
