diff --git a/README.md b/README.md
index 86c888aa27..e24de68ce0 100644
--- a/README.md
+++ b/README.md
@@ -45,6 +45,7 @@ _These features can be enabled during initial project setup._
- Serve static files from Amazon S3, Google Cloud Storage, Azure Storage or [Whitenoise](https://whitenoise.readthedocs.io/)
- Configuration for [Celery](https://docs.celeryq.dev) and [Flower](https://github.com/mher/flower) (the latter in Docker setup only)
+- Configuration for [Django-RQ](https://github.com/rq/django-rq) with [Valkey](https://valkey.io/) as an alternative task queue
- Integration with [Mailpit](https://github.com/axllent/mailpit/) for local email testing
- Integration with [Sentry](https://sentry.io/welcome/) for error logging
diff --git a/cookiecutter.json b/cookiecutter.json
index 377145d27d..4fd7a070a0 100644
--- a/cookiecutter.json
+++ b/cookiecutter.json
@@ -35,6 +35,7 @@
"use_drf": "n",
"frontend_pipeline": ["None", "Django Compressor", "Gulp", "Webpack"],
"use_celery": "n",
+ "use_django_rq": "n",
"use_mailpit": "n",
"use_sentry": "n",
"use_whitenoise": "n",
diff --git a/docs/1-getting-started/project-generation-options.rst b/docs/1-getting-started/project-generation-options.rst
index 5e6e06c8e9..a458ca1452 100644
--- a/docs/1-getting-started/project-generation-options.rst
+++ b/docs/1-getting-started/project-generation-options.rst
@@ -114,6 +114,9 @@ Both Gulp and Webpack support Bootstrap recompilation with real-time variables a
use_celery:
Indicates whether the project should be configured to use Celery_.
+use_django_rq:
+ Indicates whether the project should be configured to use Django-RQ_ with Valkey_ as an alternative task queue to Celery. Django-RQ provides a simpler, more lightweight approach to background task processing.
+
use_mailpit:
Indicates whether the project should be configured to use Mailpit_.
@@ -182,6 +185,10 @@ debug:
.. _Celery: https://github.com/celery/celery
+.. _Django-RQ: https://github.com/rq/django-rq
+
+.. _Valkey: https://valkey.io/
+
.. _Mailpit: https://github.com/axllent/mailpit
.. _Sentry: https://github.com/getsentry/sentry
diff --git a/docs/2-local-development/developing-locally-docker.rst b/docs/2-local-development/developing-locally-docker.rst
index ea6f3e887d..869c875e59 100644
--- a/docs/2-local-development/developing-locally-docker.rst
+++ b/docs/2-local-development/developing-locally-docker.rst
@@ -250,6 +250,35 @@ By default, it's enabled both in local and production environments (``docker-com
.. _`Flower`: https://github.com/mher/flower
+Django-RQ (Optional Task Queue)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you opted for Django-RQ (via setting ``use_django_rq`` to ``y``) during project setup, you can use Django-RQ with Valkey for background task processing.
+
+**Services included:**
+
+* **Valkey**: Redis-compatible data store on port 6379
+* **RQ Worker**: Processes background jobs from queues
+* **RQ Scheduler**: Handles scheduled/periodic tasks
+* **RQ Dashboard**: Web-based monitoring at http://localhost:9181
+
+**Quick example:**
+
+.. code-block:: python
+
+ # myapp/tasks.py
+ import django_rq
+
+ @django_rq.job
+ def send_notification(user_id):
+ # Task code here
+ pass
+
+ # Enqueue from anywhere
+ send_notification.delay(user_id)
+
+See the :doc:`/4-guides/using-django-rq` guide for complete documentation.
+
Using Webpack or Gulp
~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/3-deployment/deployment-with-docker.rst b/docs/3-deployment/deployment-with-docker.rst
index 6ee22fc57b..b10801c86c 100644
--- a/docs/3-deployment/deployment-with-docker.rst
+++ b/docs/3-deployment/deployment-with-docker.rst
@@ -18,7 +18,8 @@ Before you begin, check out the ``docker-compose.production.yml`` file in the ro
* ``django``: your application running behind ``Gunicorn``;
* ``postgres``: PostgreSQL database with the application's relational data;
-* ``redis``: Redis instance for caching;
+* ``redis``: Redis instance for caching (and Celery if enabled);
+* ``valkey``: Valkey instance for Django-RQ task queue (if ``use_django_rq`` is enabled);
* ``traefik``: Traefik reverse proxy with HTTPS on by default.
Provided you have opted for Celery (via setting ``use_celery`` to ``y``) there are three more services:
@@ -31,6 +32,14 @@ The ``flower`` service is served by Traefik over HTTPS, through the port ``5555`
.. _`Flower`: https://github.com/mher/flower
+If you have opted for Django-RQ (via setting ``use_django_rq`` to ``y``) there are three additional services:
+
+* ``rqworker`` running an RQ worker process;
+* ``rqscheduler`` running an RQ scheduler process;
+* ``rqdashboard`` running the RQ Dashboard monitoring interface.
+
+The ``rqdashboard`` service is served by Traefik over HTTPS, through the port ``9181``. For more information about Django-RQ, check out :doc:`/4-guides/using-django-rq`.
+
Configuring the Stack
---------------------
diff --git a/docs/4-guides/using-django-rq.rst b/docs/4-guides/using-django-rq.rst
new file mode 100644
index 0000000000..e7286801a0
--- /dev/null
+++ b/docs/4-guides/using-django-rq.rst
@@ -0,0 +1,366 @@
+Using Django-RQ
+===============
+
+.. index:: django-rq, task queue, background jobs, RQ, Valkey
+
+Django-RQ is a simple task queue system for Django built on `RQ (Redis Queue) <https://python-rq.org/>`_, using `Valkey <https://valkey.io/>`_ as the message broker. It provides a lightweight alternative to Celery for applications that need background task processing with minimal configuration.
+
+Why Django-RQ?
+--------------
+
+Django-RQ offers several advantages:
+
+- **Simplicity**: Minimal configuration required compared to Celery
+- **Built-in Monitoring**: Includes RQ Dashboard for real-time queue monitoring
+- **Python-native**: Job failures can be inspected directly in Python
+- **Valkey Backend**: Uses Valkey, an open-source Redis-compatible data store
+- **Development-friendly**: Easy to test with synchronous mode
+
+When to Use Django-RQ vs Celery
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+**Use Django-RQ when:**
+
+- You need simple background tasks with minimal configuration
+- You want built-in monitoring without additional setup
+- You're comfortable with a simpler feature set
+- You prefer inspecting job failures in Python
+
+**Use Celery when:**
+
+- You need complex workflows with chains, groups, and chords
+- You require advanced routing and scheduling features
+- You need multiple broker support (RabbitMQ, etc.)
+- Your application demands enterprise-level task processing
+
+Architecture
+------------
+
+When ``use_django_rq`` is enabled, your project includes:
+
+**Services:**
+
+- **Valkey**: Redis-compatible data store running on port 6379
+- **RQ Worker**: Processes background jobs from queues
+- **RQ Scheduler**: Handles scheduled/periodic tasks
+- **RQ Dashboard**: Web-based monitoring interface on port 9181
+
+**Queues:**
+
+- ``default``: General purpose tasks (360s timeout)
+- ``high``: High-priority tasks (500s timeout)
+- ``low``: Low-priority tasks (RQ's default timeout; see the sketch below for per-job overrides)
+
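+``DEFAULT_TIMEOUT`` applies per queue, and an individual job can override it when enqueued. A minimal sketch (``long_running_cleanup`` is an illustrative function name, not part of the template):
+
+.. code-block:: python
+
+    import django_rq
+
+    queue = django_rq.get_queue("low")
+    # Override the queue's default timeout for this one job (seconds)
+    queue.enqueue(long_running_cleanup, job_timeout=1800)
+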
+Configuration
+-------------
+
+Environment Variables
+~~~~~~~~~~~~~~~~~~~~~
+
+Django-RQ uses the following environment variable:
+
+.. code-block:: bash
+
+    # For Docker environments
+    VALKEY_URL=redis://valkey:6379/0
+
+    # For local development (non-Docker)
+    VALKEY_URL=redis://localhost:6379/0
+
+Valkey speaks the Redis protocol, so the connection URL uses the ``redis://`` scheme.
+
+Settings
+~~~~~~~~
+
+The following settings are automatically configured in ``config/settings/base.py``:
+
+.. code-block:: python
+
+ RQ_QUEUES = {
+ "default": {
+ "URL": VALKEY_URL,
+ "DEFAULT_TIMEOUT": 360,
+ },
+ "high": {
+ "URL": VALKEY_URL,
+ "DEFAULT_TIMEOUT": 500,
+ },
+ "low": {
+ "URL": VALKEY_URL,
+ },
+ }
+ RQ_SHOW_ADMIN_LINK = True
+
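+``RQ_SHOW_ADMIN_LINK`` adds a link to django-rq's statistics pages from the Django admin. Those pages also need django-rq's URLs mounted; a minimal sketch, assuming the standard ``config/urls.py`` layout:
+
+.. code-block:: python
+
+    # config/urls.py (sketch)
+    from django.urls import include, path
+
+    urlpatterns = [
+        # ... existing URL patterns ...
+        path("django-rq/", include("django_rq.urls")),
+    ]
+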
+Creating Tasks
+--------------
+
+Use the ``@job`` decorator to create background tasks:
+
+.. code-block:: python
+
+ # myapp/tasks.py
+ import django_rq
+
+ @django_rq.job
+ def send_welcome_email(user_id):
+ """Send a welcome email to a new user."""
+ from .models import User
+ from django.core.mail import send_mail
+
+ user = User.objects.get(id=user_id)
+ send_mail(
+ "Welcome!",
+ f"Hello {user.username}, welcome to our platform!",
+ "noreply@example.com",
+ [user.email],
+ )
+
+Enqueuing Tasks
+---------------
+
+Enqueue tasks from your views or other code:
+
+.. code-block:: python
+
+ from .tasks import send_welcome_email
+
+ # Enqueue to default queue
+ send_welcome_email.delay(user.id)
+
+ # Enqueue to specific queue
+ queue = django_rq.get_queue("high")
+ queue.enqueue(send_welcome_email, user.id)
+
+ # Schedule task for later
+ from datetime import timedelta
+ queue.enqueue_in(timedelta(minutes=10), send_welcome_email, user.id)
+
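+Recurring jobs can be registered through rq-scheduler, which the ``rqscheduler`` service runs. A minimal sketch, reusing the ``send_welcome_email`` task above (the cron string and user id are only examples):
+
+.. code-block:: python
+
+    import django_rq
+
+    scheduler = django_rq.get_scheduler("default")
+
+    # Run the task at the top of every hour
+    scheduler.cron(
+        "0 * * * *",
+        func=send_welcome_email,
+        args=[42],
+        queue_name="default",
+    )
+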
+Testing Tasks
+-------------
+
+For testing, use synchronous mode to avoid async complications:
+
+.. code-block:: python
+
+ # tests/test_tasks.py
+ import django_rq
+ from myapp.tasks import send_welcome_email
+
+ def test_send_welcome_email(user):
+ """Test that welcome email task works."""
+ # Get synchronous queue
+ queue = django_rq.get_queue("default", is_async=False)
+
+ # Enqueue and execute immediately
+ job = queue.enqueue(send_welcome_email, user.id)
+
+        # Verify the job ran to completion (the task returns None)
+ assert job.is_finished
+ assert job.result is None
+
+Monitoring with RQ Dashboard
+-----------------------------
+
+Access the RQ Dashboard at http://localhost:9181 (or your host:9181 in production).
+
+The dashboard shows:
+
+- Active queues and worker count
+- Jobs by state (queued, started, finished, failed)
+- Worker status and statistics
+- Failed job inspection with tracebacks
+
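+The same information can be inspected from a Django shell using RQ's job registries:
+
+.. code-block:: python
+
+    import django_rq
+    from rq.registry import FailedJobRegistry, StartedJobRegistry
+
+    queue = django_rq.get_queue("default")
+
+    print(queue.count)  # jobs waiting in the queue
+    print(len(StartedJobRegistry(queue=queue).get_job_ids()))  # jobs currently running
+    print(len(FailedJobRegistry(queue=queue).get_job_ids()))  # jobs that failed
+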
+Management Commands
+-------------------
+
+Django-RQ provides Django management commands:
+
+.. code-block:: bash
+
+ # Start worker manually (not needed with docker-compose)
+ python manage.py rqworker default high low
+
+ # Start scheduler manually
+ python manage.py rqscheduler
+
+ # Get worker statistics
+ python manage.py rqstats
+
+Docker Development
+------------------
+
+With Docker, all RQ services start automatically:
+
+.. code-block:: bash
+
+ docker compose -f docker-compose.local.yml up
+
+Access:
+
+- **Application**: http://localhost:8000
+- **RQ Dashboard**: http://localhost:9181
+
+Services will auto-reload on code changes using ``watchfiles``.
+
+Production Deployment
+---------------------
+
+Docker Production
+~~~~~~~~~~~~~~~~~
+
+RQ services are included in ``docker-compose.production.yml``:
+
+.. code-block:: bash
+
+ docker compose -f docker-compose.production.yml up
+
+Ensure you set the environment variable:
+
+.. code-block:: bash
+
+    VALKEY_URL=redis://valkey:6379/0
+
+Heroku
+~~~~~~
+
+The ``Procfile`` includes RQ worker and scheduler:
+
+.. code-block:: text
+
+ worker: python manage.py rqworker default high low
+ scheduler: python manage.py rqscheduler
+
+Scale workers as needed:
+
+.. code-block:: bash
+
+ heroku ps:scale worker=2
+
+Best Practices
+--------------
+
+1. **Keep tasks focused**: Each task should do one thing well
+2. **Use timeouts**: Set appropriate timeouts for long-running tasks
+3. **Handle failures gracefully**: Tasks may fail; RQ can retry them (see the sketch after this list)
+4. **Log appropriately**: Use Python logging for debugging
+5. **Test synchronously**: Use ``is_async=False`` in tests
+6. **Choose queues wisely**: Use ``high`` for critical tasks, ``low`` for background cleanup
+
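+For point 3, RQ ships a ``Retry`` helper; a minimal sketch, reusing the ``send_welcome_email`` task from earlier (the ``myapp`` module path is illustrative):
+
+.. code-block:: python
+
+    import django_rq
+    from rq import Retry
+
+    from myapp.tasks import send_welcome_email
+
+    queue = django_rq.get_queue("default")
+
+    # Retry up to 3 times, waiting 10s, 30s, then 60s between attempts
+    queue.enqueue(send_welcome_email, 42, retry=Retry(max=3, interval=[10, 30, 60]))
+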
+Example: Image Processing Task
+-------------------------------
+
+Here's a complete example of an image processing task:
+
+.. code-block:: python
+
+ # myapp/tasks.py
+ import logging
+ import django_rq
+ from PIL import Image
+ from django.core.files.storage import default_storage
+
+ logger = logging.getLogger(__name__)
+
+ @django_rq.job('high', timeout=300)
+ def generate_thumbnail(photo_id):
+ """Generate thumbnail for uploaded photo."""
+ from .models import Photo
+
+ try:
+ photo = Photo.objects.get(id=photo_id)
+
+ # Open original image
+ img_path = photo.original.path
+ img = Image.open(img_path)
+
+ # Generate thumbnail
+ img.thumbnail((200, 200))
+
+ # Save thumbnail
+ thumb_path = f"thumbnails/{photo_id}.jpg"
+ with default_storage.open(thumb_path, 'wb') as f:
+ img.save(f, 'JPEG', quality=85)
+
+ # Update model
+ photo.thumbnail = thumb_path
+ photo.save()
+
+ logger.info(f"Generated thumbnail for photo {photo_id}")
+
+ except Photo.DoesNotExist:
+ logger.error(f"Photo {photo_id} not found")
+ raise
+ except Exception as e:
+ logger.exception(f"Failed to generate thumbnail: {e}")
+ raise
+
+ # views.py
+ from django.views.generic import CreateView
+ from .models import Photo
+ from .tasks import generate_thumbnail
+
+ class PhotoUploadView(CreateView):
+ model = Photo
+ fields = ['title', 'original']
+
+ def form_valid(self, form):
+ response = super().form_valid(form)
+ # Enqueue thumbnail generation in background
+ generate_thumbnail.delay(self.object.id)
+ return response
+
+Troubleshooting
+---------------
+
+Workers Not Processing Jobs
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Check that workers are running:
+
+.. code-block:: bash
+
+ docker compose -f docker-compose.local.yml ps
+
+Verify Valkey connection:
+
+.. code-block:: bash
+
+ docker compose -f docker-compose.local.yml exec django python manage.py shell
+ >>> import django_rq
+ >>> queue = django_rq.get_queue()
+ >>> print(queue.connection)
+
+Jobs Failing Silently
+~~~~~~~~~~~~~~~~~~~~~
+
+Check failed job queue in RQ Dashboard or via shell:
+
+.. code-block:: python
+
+ import django_rq
+ from rq.registry import FailedJobRegistry
+
+ queue = django_rq.get_queue()
+ registry = FailedJobRegistry(queue=queue)
+
+ for job_id in registry.get_job_ids():
+ job = queue.fetch_job(job_id)
+ print(f"Job {job_id}: {job.exc_info}")
+
+Dashboard Not Loading
+~~~~~~~~~~~~~~~~~~~~~
+
+Ensure port 9181 is exposed and dashboard service is running:
+
+.. code-block:: bash
+
+ docker compose -f docker-compose.local.yml logs rqdashboard
+
+Further Reading
+---------------
+
+- `Django-RQ Documentation <https://github.com/rq/django-rq>`_
+- `RQ Documentation <https://python-rq.org/>`_
+- `Valkey Documentation <https://valkey.io/>`_
+- :doc:`/4-guides/using-celery` - Alternative task queue guide
diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py
index e35c974865..fa4ed4ca2c 100644
--- a/hooks/post_gen_project.py
+++ b/hooks/post_gen_project.py
@@ -84,6 +84,11 @@ def remove_heroku_files():
if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis":
# Don't remove the file if we are using Travis CI but not using Heroku
continue
+ # Keep Procfile if Celery or RQ is being used
+ if file_name == "Procfile" and (
+ "{{ cookiecutter.use_celery }}".lower() == "y" or "{{ cookiecutter.use_django_rq }}".lower() == "y"
+ ):
+ continue
Path(file_name).unlink()
shutil.rmtree("bin")
@@ -220,8 +225,6 @@ def remove_repo_from_pre_commit_config(repo_to_remove: str):
def remove_celery_files():
file_paths = [
Path("config", "celery_app.py"),
- Path("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
- Path("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
]
for file_path in file_paths:
file_path.unlink()
@@ -398,6 +401,37 @@ def remove_celery_compose_dirs():
shutil.rmtree(Path("compose", "production", "django", "celery"))
+def remove_rq_files():
+    """Remove RQ-only files (currently none; shared task files are handled by remove_task_queue_files)."""
+    file_paths = []
+    for file_path in file_paths:
+        file_path.unlink()
+
+
+def remove_task_queue_files():
+ """Remove task queue files when neither Celery nor RQ is used."""
+ file_paths = [
+ Path("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
+ Path("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
+ ]
+ for file_path in file_paths:
+ file_path.unlink()
+
+
+def remove_rq_compose_dirs():
+ shutil.rmtree(Path("compose", "local", "django", "rq"))
+ shutil.rmtree(Path("compose", "production", "django", "rq"))
+
+
+def remove_rqdashboard_script():
+ """Remove rqdashboard startup script directory when using django-rq."""
+ dashboard_path = Path("compose", "local", "django", "rq", "dashboard")
+ if dashboard_path.exists():
+ shutil.rmtree(dashboard_path)
+ dashboard_path = Path("compose", "production", "django", "rq", "dashboard")
+ if dashboard_path.exists():
+ shutil.rmtree(dashboard_path)
+
+
def remove_node_dockerfile():
shutil.rmtree(Path("compose", "local", "node"))
@@ -488,6 +522,19 @@ def main(): # noqa: C901, PLR0912, PLR0915
if "{{ cookiecutter.use_docker }}".lower() == "y":
remove_celery_compose_dirs()
+ if "{{ cookiecutter.use_django_rq }}".lower() == "n":
+ remove_rq_files()
+ if "{{ cookiecutter.use_docker }}".lower() == "y":
+ remove_rq_compose_dirs()
+ elif "{{ cookiecutter.use_django_rq }}".lower() == "y":
+ # Remove standalone rqdashboard in favor of built-in admin dashboard
+ if "{{ cookiecutter.use_docker }}".lower() == "y":
+ remove_rqdashboard_script()
+
+ # Remove task queue files only if neither Celery nor RQ is used
+ if "{{ cookiecutter.use_celery }}".lower() == "n" and "{{ cookiecutter.use_django_rq }}".lower() == "n":
+ remove_task_queue_files()
+
if "{{ cookiecutter.ci_tool }}" != "Travis":
remove_dottravisyml_file()
diff --git a/{{cookiecutter.project_slug}}/.envs/.local/.django b/{{cookiecutter.project_slug}}/.envs/.local/.django
index ef581a1c09..a0adb2e129 100644
--- a/{{cookiecutter.project_slug}}/.envs/.local/.django
+++ b/{{cookiecutter.project_slug}}/.envs/.local/.django
@@ -15,3 +15,9 @@ REDIS_URL=redis://redis:6379/0
CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
{%- endif %}
+
+{%- if cookiecutter.use_django_rq == 'y' %}
+# Valkey
+# ------------------------------------------------------------------------------
+VALKEY_URL=redis://valkey:6379/0
+{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.django b/{{cookiecutter.project_slug}}/.envs/.production/.django
index 07ffd7112f..bea67f89c7 100644
--- a/{{cookiecutter.project_slug}}/.envs/.production/.django
+++ b/{{cookiecutter.project_slug}}/.envs/.production/.django
@@ -63,11 +63,11 @@ WEB_CONCURRENCY=4
# ------------------------------------------------------------------------------
SENTRY_DSN=
{% endif %}
-
+{% if cookiecutter.use_celery == 'y' %}
# Redis
# ------------------------------------------------------------------------------
REDIS_URL=redis://redis:6379/0
-{% if cookiecutter.use_celery == 'y' %}
+
# Celery
# ------------------------------------------------------------------------------
@@ -75,3 +75,11 @@ REDIS_URL=redis://redis:6379/0
CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
{% endif %}
+{% if cookiecutter.use_django_rq == 'y' %}
+# Valkey
+# ------------------------------------------------------------------------------
+VALKEY_URL=redis://valkey:6379/0
+
+# Django-RQ
+# ------------------------------------------------------------------------------
+{% endif %}
diff --git a/{{cookiecutter.project_slug}}/Procfile b/{{cookiecutter.project_slug}}/Procfile
index 6424e048d3..0b3ae67e73 100644
--- a/{{cookiecutter.project_slug}}/Procfile
+++ b/{{cookiecutter.project_slug}}/Procfile
@@ -8,3 +8,7 @@ web: gunicorn config.wsgi:application
worker: REMAP_SIGTERM=SIGQUIT celery -A config.celery_app worker --loglevel=info
beat: REMAP_SIGTERM=SIGQUIT celery -A config.celery_app beat --loglevel=info
{%- endif %}
+{%- if cookiecutter.use_django_rq == "y" %}
+worker: python manage.py rqworker default high low
+scheduler: python manage.py rqscheduler
+{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
index 4eb6a7b324..2395fefa94 100644
--- a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
@@ -68,4 +68,14 @@ RUN sed -i 's/\r$//g' /start-flower
RUN chmod +x /start-flower
{% endif %}
+{% if cookiecutter.use_django_rq == "y" %}
+COPY ./compose/local/django/rq/worker/start /start-rqworker
+RUN sed -i 's/\r$//g' /start-rqworker
+RUN chmod +x /start-rqworker
+
+COPY ./compose/local/django/rq/scheduler/start /start-rqscheduler
+RUN sed -i 's/\r$//g' /start-rqscheduler
+RUN chmod +x /start-rqscheduler
+{% endif %}
+
ENTRYPOINT ["/entrypoint"]
diff --git a/{{cookiecutter.project_slug}}/compose/local/django/rq/scheduler/start b/{{cookiecutter.project_slug}}/compose/local/django/rq/scheduler/start
new file mode 100644
index 0000000000..dc3add9eab
--- /dev/null
+++ b/{{cookiecutter.project_slug}}/compose/local/django/rq/scheduler/start
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+exec watchfiles 'python manage.py rqscheduler' --filter python
diff --git a/{{cookiecutter.project_slug}}/compose/local/django/rq/worker/start b/{{cookiecutter.project_slug}}/compose/local/django/rq/worker/start
new file mode 100644
index 0000000000..92ca67839a
--- /dev/null
+++ b/{{cookiecutter.project_slug}}/compose/local/django/rq/worker/start
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -o errexit
+set -o nounset
+
+exec watchfiles 'python manage.py rqworker default high low' --filter python
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
index d191aa273d..ec62726d58 100644
--- a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
@@ -106,6 +106,17 @@ RUN sed -i 's/\r$//g' /start-flower
RUN chmod +x /start-flower
{%- endif %}
+{%- if cookiecutter.use_django_rq == "y" %}
+COPY --chown=django:django ./compose/production/django/rq/worker/start /start-rqworker
+RUN sed -i 's/\r$//g' /start-rqworker
+RUN chmod +x /start-rqworker
+
+COPY --chown=django:django ./compose/production/django/rq/scheduler/start /start-rqscheduler
+RUN sed -i 's/\r$//g' /start-rqscheduler
+RUN chmod +x /start-rqscheduler
+{%- endif %}
+
# Copy the application from the builder
COPY --from=python-build-stage --chown=django:django ${APP_HOME} ${APP_HOME}
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/rq/scheduler/start b/{{cookiecutter.project_slug}}/compose/production/django/rq/scheduler/start
new file mode 100644
index 0000000000..a9b94080f5
--- /dev/null
+++ b/{{cookiecutter.project_slug}}/compose/production/django/rq/scheduler/start
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+exec python manage.py rqscheduler
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/rq/worker/start b/{{cookiecutter.project_slug}}/compose/production/django/rq/worker/start
new file mode 100644
index 0000000000..75753a2b80
--- /dev/null
+++ b/{{cookiecutter.project_slug}}/compose/production/django/rq/worker/start
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+exec python manage.py rqworker default high low
diff --git a/{{cookiecutter.project_slug}}/config/settings/base.py b/{{cookiecutter.project_slug}}/config/settings/base.py
index ba0a7ce463..efcb48adbe 100644
--- a/{{cookiecutter.project_slug}}/config/settings/base.py
+++ b/{{cookiecutter.project_slug}}/config/settings/base.py
@@ -92,6 +92,9 @@
{%- if cookiecutter.use_celery == 'y' %}
"django_celery_beat",
{%- endif %}
+{%- if cookiecutter.use_django_rq == 'y' %}
+ "django_rq",
+{%- endif %}
{%- if cookiecutter.use_drf == "y" %}
"rest_framework",
"rest_framework.authtoken",
@@ -285,8 +288,13 @@
"root": {"level": "INFO", "handlers": ["console"]},
}
+{% if cookiecutter.use_celery == 'y' -%}
REDIS_URL = env("REDIS_URL", default="redis://{% if cookiecutter.use_docker == 'y' %}redis{%else%}localhost{% endif %}:6379/0")
REDIS_SSL = REDIS_URL.startswith("rediss://")
+{%- endif %}
+{% if cookiecutter.use_django_rq == 'y' -%}
+VALKEY_URL = env("VALKEY_URL", default="redis://{% if cookiecutter.use_docker == 'y' %}valkey{%else%}localhost{% endif %}:6379/0")
+{%- endif %}
{% if cookiecutter.use_celery == 'y' -%}
# Celery
@@ -330,6 +338,26 @@
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#worker-hijack-root-logger
CELERY_WORKER_HIJACK_ROOT_LOGGER = False
+{%- endif %}
+{% if cookiecutter.use_django_rq == 'y' -%}
+# Django-RQ
+# ------------------------------------------------------------------------------
+# https://github.com/rq/django-rq
+RQ_QUEUES = {
+ "default": {
+ "URL": VALKEY_URL,
+ "DEFAULT_TIMEOUT": 360,
+ },
+ "high": {
+ "URL": VALKEY_URL,
+ "DEFAULT_TIMEOUT": 500,
+ },
+ "low": {
+ "URL": VALKEY_URL,
+ },
+}
+# Add a link to the django-rq statistics page in the Django admin
+RQ_SHOW_ADMIN_LINK = True
{%- endif %}
# django-allauth
# ------------------------------------------------------------------------------
diff --git a/{{cookiecutter.project_slug}}/docker-compose.local.yml b/{{cookiecutter.project_slug}}/docker-compose.local.yml
index c713b2e941..1fbc34fbdd 100644
--- a/{{cookiecutter.project_slug}}/docker-compose.local.yml
+++ b/{{cookiecutter.project_slug}}/docker-compose.local.yml
@@ -2,9 +2,10 @@ volumes:
{{ cookiecutter.project_slug }}_local_postgres_data: {}
{{ cookiecutter.project_slug }}_local_postgres_data_backups: {}
{% if cookiecutter.use_celery == 'y' %}{{ cookiecutter.project_slug }}_local_redis_data: {}{% endif %}
+ {% if cookiecutter.use_django_rq == 'y' %}{{ cookiecutter.project_slug }}_local_valkey_data: {}{% endif %}
services:
- django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
+ django:{% if cookiecutter.use_celery == 'y' or cookiecutter.use_django_rq == 'y' %} &django{% endif %}
build:
context: .
dockerfile: ./compose/local/django/Dockerfile
@@ -15,6 +16,9 @@ services:
{%- if cookiecutter.use_celery == 'y' %}
- redis
{%- endif %}
+ {%- if cookiecutter.use_django_rq == 'y' %}
+ - valkey
+ {%- endif %}
{%- if cookiecutter.use_mailpit == 'y' %}
- mailpit
{%- endif %}
@@ -61,6 +65,9 @@ services:
volumes:
- {{ cookiecutter.project_slug }}_local_redis_data:/data
+ {%- endif %}
+ {%- if cookiecutter.use_celery == 'y' %}
+
celeryworker:
<<: *django
image: {{ cookiecutter.project_slug }}_local_celeryworker
@@ -95,6 +102,41 @@ services:
- '5555:5555'
command: /start-flower
+ {%- endif %}
+ {%- if cookiecutter.use_django_rq == 'y' %}
+
+ valkey:
+ image: docker.io/valkey/valkey:8.0
+ container_name: {{ cookiecutter.project_slug }}_local_valkey
+ volumes:
+ - {{ cookiecutter.project_slug }}_local_valkey_data:/data
+
+ rqworker:
+ <<: *django
+ image: {{ cookiecutter.project_slug }}_local_rqworker
+ container_name: {{ cookiecutter.project_slug }}_local_rqworker
+ depends_on:
+ - valkey
+ - postgres
+ {%- if cookiecutter.use_mailpit == 'y' %}
+ - mailpit
+ {%- endif %}
+ ports: []
+ command: /start-rqworker
+
+ rqscheduler:
+ <<: *django
+ image: {{ cookiecutter.project_slug }}_local_rqscheduler
+ container_name: {{ cookiecutter.project_slug }}_local_rqscheduler
+ depends_on:
+ - valkey
+ - postgres
+ {%- if cookiecutter.use_mailpit == 'y' %}
+ - mailpit
+ {%- endif %}
+ ports: []
+ command: /start-rqscheduler
+
{%- endif %}
{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
diff --git a/{{cookiecutter.project_slug}}/docker-compose.production.yml b/{{cookiecutter.project_slug}}/docker-compose.production.yml
index f458d08046..337a5fc4ac 100644
--- a/{{cookiecutter.project_slug}}/docker-compose.production.yml
+++ b/{{cookiecutter.project_slug}}/docker-compose.production.yml
@@ -8,10 +8,13 @@ volumes:
{% if cookiecutter.use_celery == 'y' %}
production_redis_data: {}
{% endif %}
+ {% if cookiecutter.use_django_rq == 'y' %}
+ production_valkey_data: {}
+ {% endif %}
services:
- django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
+ django:{% if cookiecutter.use_celery == 'y' or cookiecutter.use_django_rq == 'y' %} &django{% endif %}
build:
context: .
dockerfile: ./compose/production/django/Dockerfile
@@ -35,7 +38,12 @@ services:
{%- endif %}
depends_on:
- postgres
+ {%- if cookiecutter.use_celery == 'y' %}
- redis
+ {%- endif %}
+ {%- if cookiecutter.use_django_rq == 'y' %}
+ - valkey
+ {%- endif %}
env_file:
- ./.envs/.production/.django
- ./.envs/.production/.postgres
@@ -72,14 +80,14 @@ services:
- '0.0.0.0:5555:5555'
{%- endif %}
+ {%- if cookiecutter.use_celery == 'y' %}
+
redis:
image: docker.io/redis:7.2
- {% if cookiecutter.use_celery == 'y' %}
volumes:
- production_redis_data:/data
- {% endif %}
-
+ {%- endif %}
{%- if cookiecutter.use_celery == 'y' %}
celeryworker:
@@ -97,6 +105,23 @@ services:
image: {{ cookiecutter.project_slug }}_production_flower
command: /start-flower
{%- endif %}
+ {%- if cookiecutter.use_django_rq == 'y' %}
+
+ valkey:
+ image: docker.io/valkey/valkey:8.0
+ volumes:
+ - production_valkey_data:/data
+
+ rqworker:
+ <<: *django
+ image: {{ cookiecutter.project_slug }}_production_rqworker
+ command: /start-rqworker
+
+ rqscheduler:
+ <<: *django
+ image: {{ cookiecutter.project_slug }}_production_rqscheduler
+ command: /start-rqscheduler
+ {%- endif %}
{%- if cookiecutter.cloud_provider == 'AWS' %}
awscli:
diff --git a/{{cookiecutter.project_slug}}/requirements/base.txt b/{{cookiecutter.project_slug}}/requirements/base.txt
index 8e37dbe2de..532c091aaf 100644
--- a/{{cookiecutter.project_slug}}/requirements/base.txt
+++ b/{{cookiecutter.project_slug}}/requirements/base.txt
@@ -22,6 +22,11 @@ django-celery-beat==2.8.1 # https://github.com/celery/django-celery-beat
flower==2.0.1 # https://github.com/mher/flower
{%- endif %}
{%- endif %}
+{%- if cookiecutter.use_django_rq == "y" %}
+django-rq==2.10.2 # https://github.com/rq/django-rq
+rq==1.16.2 # https://github.com/rq/rq
+rq-scheduler==0.13.1 # https://github.com/rq/rq-scheduler
+{%- endif %}
{%- if cookiecutter.use_async == 'y' %}
uvicorn[standard]==0.38.0 # https://github.com/Kludex/uvicorn
uvicorn-worker==0.4.0 # https://github.com/Kludex/uvicorn-worker
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tasks.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tasks.py
index ca51cd7401..e94608ac75 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tasks.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tasks.py
@@ -1,9 +1,17 @@
+{% if cookiecutter.use_celery == 'y' -%}
from celery import shared_task
+{%- elif cookiecutter.use_django_rq == 'y' -%}
+import django_rq
+{%- endif %}
from .models import User
+{% if cookiecutter.use_celery == 'y' -%}
@shared_task()
+{%- elif cookiecutter.use_django_rq == 'y' -%}
+@django_rq.job
+{%- endif %}
def get_users_count():
- """A pointless Celery task to demonstrate usage."""
+ """A pointless {% if cookiecutter.use_celery == 'y' %}Celery{% elif cookiecutter.use_django_rq == 'y' %}RQ{% endif %} task to demonstrate usage."""
return User.objects.count()
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py
index d3f6101399..cdb6dbbc09 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_tasks.py
@@ -1,5 +1,9 @@
import pytest
+{% if cookiecutter.use_celery == 'y' -%}
from celery.result import EagerResult
+{%- elif cookiecutter.use_django_rq == 'y' -%}
+import django_rq
+{%- endif %}
from {{ cookiecutter.project_slug }}.users.tasks import get_users_count
from {{ cookiecutter.project_slug }}.users.tests.factories import UserFactory
@@ -8,10 +12,16 @@
def test_user_count(settings):
- """A basic test to execute the get_users_count Celery task."""
+ """A basic test to execute the get_users_count {% if cookiecutter.use_celery == 'y' %}Celery{% elif cookiecutter.use_django_rq == 'y' %}RQ{% endif %} task."""
batch_size = 3
UserFactory.create_batch(batch_size)
+{% if cookiecutter.use_celery == 'y' -%}
settings.CELERY_TASK_ALWAYS_EAGER = True
task_result = get_users_count.delay()
assert isinstance(task_result, EagerResult)
assert task_result.result == batch_size
+{% elif cookiecutter.use_django_rq == 'y' -%}
+ queue = django_rq.get_queue("default", is_async=False)
+ job = queue.enqueue(get_users_count)
+ assert job.result == batch_size
+{%- endif %}