what is it?
lets celery workers run as background services (daemons), separate from the main django application
the workers can be managed with init scripts, supervisor, or systemd
using systemd
create a celery config file (this is the EnvironmentFile the service unit below reads)
# Name of nodes to start
# here we have a single node
CELERYD_NODES="w1"
# Absolute or relative path to the 'celery' command:
CELERY_BIN="/opt/repos/contactable/fica_face/fica_face_env/bin/celery"
# Django App instance to use
# comment out this line if you don't use an app
CELERY_APP="fica_face"
# Name of the sub-command used to start/stop the workers
CELERYD_MULTI="multi"
# Extra command-line arguments to the worker
CELERYD_OPTS="--time-limit=900 --concurrency=4"
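# NOTE: a worker only consumes from the default "celery" queue unless told
# otherwise; if you use the custom queues from the django settings below,
# list them here with -Q (queue names are placeholders), e.g.:
# CELERYD_OPTS="--time-limit=900 --concurrency=4 -Q <UNIQUE QUEUE NAME 1>,<UNIQUE QUEUE NAME 2>"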
# - %n will be replaced with the first part of the nodename.
# - %I will be replaced with the current child process index,
#   which is important when using the prefork pool to avoid race conditions.
CELERYD_PID_FILE="/var/run/celery/%n.pid"
CELERYD_LOG_FILE="/var/log/celery/celery.log"
CELERYD_LOG_LEVEL="INFO"

create a celery.service file
[Unit]
Description=Celery Service
After=network.target
[Service]
LimitNOFILE=10000
Type=forking
# run the workers as the web server's user and group (www-data here)
User=www-data
Group=www-data
# path to celery config file
EnvironmentFile=/opt/repos/contactable/fica_face/deploy/celery/celery
WorkingDirectory=/opt/repos/contactable/fica_face
ExecStart=/bin/sh -c '${CELERY_BIN} -A $CELERY_APP multi start $CELERYD_NODES \
--pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
--loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS'
ExecStop=/bin/sh -c '${CELERY_BIN} multi stopwait $CELERYD_NODES \
--pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
--loglevel="${CELERYD_LOG_LEVEL}"'
ExecReload=/bin/sh -c '${CELERY_BIN} -A $CELERY_APP multi restart $CELERYD_NODES \
--pidfile=${CELERYD_PID_FILE} --logfile=${CELERYD_LOG_FILE} \
--loglevel="${CELERYD_LOG_LEVEL}" $CELERYD_OPTS'
Restart=always
[Install]
WantedBy=multi-user.target

create a celery.conf file
d /run/celery 0755 www-data www-data -
d /var/log/celery 0755 www-data www-data -

create the celery log and pid directories
sudo cp celery.conf /etc/tmpfiles.d/
sudo systemd-tmpfiles --create /etc/tmpfiles.d/celery.conf

enable and start the celery service
sudo cp celery.service /lib/systemd/system/celery.service
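# make systemd aware of the new unit file
sudo systemctl daemon-reload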
sudo systemctl enable celery.service
sudo service celery start
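to confirm the workers came up, check the service status and logs:
sudo systemctl status celery.service
sudo journalctl -u celery.service
tail -f /var/log/celery/celery.log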
django setup and configuration
In the primary django application (in this example’s case the app is fica_face), create a celery.py file
import os
from celery import Celery
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fica_face.settings")
app = Celery("fica_face")
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()
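the celery docs also load the app whenever django starts, by importing it in the project package's __init__.py (fica_face/__init__.py here), so that shared_task picks it up:
from .celery import app as celery_app

__all__ = ("celery_app",)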
update the django settings.py file to connect to a redis queue
from kombu import Queue

CELERY_BROKER_URL = "redis://<REDIS IP ADR>:6379"
CELERY_RESULT_BACKEND = "redis://<REDIS IP ADR>:6379"
# config for different queues
ATHENA_QUEUE = "<UNIQUE QUEUE NAME 1>"
ASTRIA_QUEUE = "<UNIQUE QUEUE NAME 2>"
CELERY_TASK_QUEUES = (
    Queue(ATHENA_QUEUE),
    Queue(ASTRIA_QUEUE),
)
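note: tasks sent without an explicit queue land on the default queue (named "celery"); a minimal sketch of routing those to one of the queues above instead (the setting name assumes the CELERY namespace configured in celery.py):
# optional: route tasks with no explicit queue to the athena queue
CELERY_TASK_DEFAULT_QUEUE = ATHENA_QUEUE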
for a worker to pick up tasks from a queue, register them with the app.task or shared_task decorators
from celery import shared_task
from django.conf import settings
from .models import SomeModel  # placeholder model for illustration

@shared_task(queue=settings.ATHENA_QUEUE)
def queue_post_api_request(*args, **kwargs):
    # whatever the task needs to do; the worker has the django settings
    # loaded, so the task can use the ORM outside the django application
    obj_id = kwargs.get("id")
    model_instance = SomeModel.objects.filter(id=obj_id).first()

to add a task to the queue, use the delay() method
from tasks import queue_post_api_request
queue_post_api_request.delay(id="foo")
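delay() is a shortcut for apply_async(), which also accepts routing and scheduling options at call time; a sketch reusing the queue names assumed above:
# send the task to a different queue and delay execution by 10 seconds
queue_post_api_request.apply_async(
    kwargs={"id": "foo"},
    queue=settings.ASTRIA_QUEUE,
    countdown=10,
)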