diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 3d34792e44608412a7829cf2750ffb891058975a..96900dd79e1d93a2fc0fe5bf37926a5ad10144ad 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -11,7 +11,7 @@ include:
 
 # For jobs that run backend scripts directly
 .backend-setup:
-  image: registry.gitlab.com/arkindex/backend/base:django-3.1.2
+  image: registry.gitlab.com/arkindex/backend/base:rq
 
   cache:
     paths:
diff --git a/.isort.cfg b/.isort.cfg
index 54ca8ae31bdf9613765ae83d72a39cff4ecf4e69..a33fb368115d5f8730f3c1a8d1eeffdd9f848a7c 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -8,4 +8,4 @@ line_length = 120
 
 default_section=FIRSTPARTY
 known_first_party = arkindex_common,ponos,transkribus
-known_third_party = asgiref,boto3,botocore,channels,corsheaders,django,django_admin_hstore_widget,elasticsearch,elasticsearch_dsl,enumfields,gitlab,mock,psycopg2,requests,responses,rest_framework,sentry_sdk,setuptools,tenacity,tripoli,yaml
+known_third_party = boto3,botocore,corsheaders,django,django_admin_hstore_widget,django_rq,elasticsearch,elasticsearch_dsl,enumfields,gitlab,psycopg2,requests,responses,rest_framework,setuptools,tenacity,tripoli,yaml
diff --git a/Dockerfile b/Dockerfile
index eceeae5b4d47d0aeb622f678e820cf24c4e8b80f..c3bc0b90604b8542bc5054e2a44d01210daa8a90 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,10 +1,10 @@
-FROM registry.gitlab.com/arkindex/backend/base:django-3.1.2 as build
+FROM registry.gitlab.com/arkindex/backend/base:rq as build
 
 RUN mkdir build
 ADD . build
 RUN cd build && python3 setup.py sdist
 
-FROM registry.gitlab.com/arkindex/backend/base:django-3.1.2
+FROM registry.gitlab.com/arkindex/backend/base:rq
 
 ARG COMMON_BRANCH=master
 ARG COMMON_ID=9855787
diff --git a/Dockerfile.binary b/Dockerfile.binary
index c30e984258d13215cebbe3994a3b0dd4c22c9e6b..9a054e1f7fc9a019db9ff855d8029b155e77fa1b 100644
--- a/Dockerfile.binary
+++ b/Dockerfile.binary
@@ -65,7 +65,7 @@ RUN python -m nuitka \
       arkindex/manage.py
 
 # Start over from a clean setup
-FROM registry.gitlab.com/arkindex/backend/base:django-3.1.2 as build
+FROM registry.gitlab.com/arkindex/backend/base:rq as build
 
 # Import files from compilation
 RUN mkdir /usr/share/arkindex
diff --git a/Makefile b/Makefile
index 2845cddef832b5abc523a554e2ecc576dd933872..d289f9168997a65a9cc867355dd9e7edc630c43b 100644
--- a/Makefile
+++ b/Makefile
@@ -20,6 +20,9 @@ build:
 binary:
 	CI_PROJECT_DIR=$(ROOT_DIR) CI_REGISTRY_IMAGE=$(IMAGE_TAG) COMMON_BRANCH=$(COMMON_BRANCH) PONOS_BRANCH=$(PONOS_BRANCH) $(ROOT_DIR)/ci/build.sh Dockerfile.binary -binary
 
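+# Start an RQ worker in the foreground (requires a running Redis instance)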
+worker:
+	arkindex/manage.py rqworker -v 2
+
 test-fixtures:
 	$(eval export PGPASSWORD=devdata)
 	psql -h 127.0.0.1 -p 9100 -U devuser -c 'CREATE DATABASE arkindex_tmp_fixtures WITH TEMPLATE arkindex_dev' template1
diff --git a/README.md b/README.md
index 944683fd826ac14fbe0b5357343e1b2f60ca36bb..9b9907f4c510a533c8c3c06eb432c123d9827f5a 100644
--- a/README.md
+++ b/README.md
@@ -141,6 +141,7 @@ pre-commit install
 The linting workflow will now run on modified files before committing, and may fix issues for you.
 
 If you want to run the full workflow on all the files: `pre-commit run -a`.
+
 ## Debugging tools
 
 Run `pip install ipython django-debug-toolbar django_extensions` to install all the available optional dev tools for the backend.
@@ -176,3 +177,13 @@ SHELL_PLUS_POST_IMPORTS = [
 ```
 
 You may want to also uninstall `django-nose`, as it is an optional test runner that is used for code coverage in the CI. Uninstalling will remove about a hundred useless lines in the `./manage.py test` output so you will no longer have to scroll to the test errors list.
+
+## Asynchronous tasks
+
+We use [rq](https://python-rq.org/), integrated via [django-rq](https://pypi.org/project/django-rq/), to run tasks without blocking an API request or causing timeouts. To call them from Python code, use the trigger methods in `arkindex.project.triggers`; they perform safety checks that make some errors easier to catch in development. The actual tasks live in `arkindex.documents.tasks`. The following tasks exist:
+
+* Delete ML results from a corpus or an element and its children: `ml_results_delete`
+* Delete a corpus: `corpus_delete`
+* Reindex elements, transcriptions or entities into ElasticSearch: `reindex_start`
+
+To run them, use `make worker` to start an RQ worker. You will need Redis running; `make slim` or `make` in the architecture will provide it. `make` in the architecture also provides an RQ worker running in Docker from a binary build.
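+
+As an illustration, here is a minimal sketch of enqueueing a reindexation directly (the corpus ID below is a placeholder; in application code, prefer the trigger methods, which wrap this same `.delay()` call with safety checks):
+
+```python
+from arkindex.documents.tasks import reindex_start
+
+# django-rq's @job decorator provides .delay(), which enqueues the task
+# on Redis instead of running it synchronously in the current process.
+reindex_start.delay(
+    corpus_id='11111111-1111-1111-1111-111111111111',  # placeholder ID
+    transcriptions=True,
+    elements=True,
+    entities=True,
+)
+```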
diff --git a/arkindex/dataimport/tests/test_repos.py b/arkindex/dataimport/tests/test_repos.py
index 60b79b4426385441c11491ae2dc2e590aa3169dc..08f92f5686e299176370ed312619434de375a174 100644
--- a/arkindex/dataimport/tests/test_repos.py
+++ b/arkindex/dataimport/tests/test_repos.py
@@ -1,5 +1,6 @@
+from unittest.mock import MagicMock, patch
+
 from django.urls import reverse
-from mock import MagicMock, patch
 from rest_framework import status
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import DateTimeField
diff --git a/arkindex/documents/consumers.py b/arkindex/documents/consumers.py
deleted file mode 100644
index a29b81e4ea09d50099f6d02a8e4990e222622c61..0000000000000000000000000000000000000000
--- a/arkindex/documents/consumers.py
+++ /dev/null
@@ -1,216 +0,0 @@
-import logging
-from math import ceil
-
-from channels.consumer import SyncConsumer
-from django.db.models import Q
-from django.db.models.deletion import Collector
-from django.db.models.signals import pre_delete
-
-from arkindex.dataimport.models import DataImportElement
-from arkindex.documents.indexer import Indexer
-from arkindex.documents.models import (
-    Classification,
-    Corpus,
-    Element,
-    ElementPath,
-    Entity,
-    EntityLink,
-    MetaData,
-    Selection,
-    Transcription,
-    TranscriptionEntity,
-)
-from arkindex.documents.signals import pre_delete_handler
-from arkindex.project.tools import disconnect_signal
-from arkindex.project.triggers import reindex_start
-
-logger = logging.getLogger(__name__)
-
-
-def _delete_queryset(queryset, batch_size=1000):
-    """
-    Helper to delete large querysets with as little SQL queries and memory footprint as possible.
-    """
-    count = queryset.count()
-    logger.info('Deleting {} {}'.format(count, queryset.model.__name__))
-
-    if not count:
-        return
-
-    if Collector(using=queryset.db).can_fast_delete(queryset.all()):
-        # If a single DELETE statement can be used,
-        # bypass both the batched deletion and Django's related objects checks.
-        logger.debug('Using single-query deletion')
-        queryset._raw_delete(using=queryset.db)
-        return
-
-    if count <= batch_size:
-        # If there is a single batch, just delete.
-        queryset.delete()
-        return
-
-    for i in range(ceil(count / batch_size)):
-        logger.debug('Deleting batch {}'.format(i + 1))
-        # Deleting a slice is not allowed;
-        # we use a sliced subquery instead and still delete in a single query.
-        # DELETE FROM … WHERE id IN (SELECT id FROM … LIMIT [batch_size])
-        ids = queryset[:batch_size].values('id')
-        queryset.model.objects.filter(id__in=ids).delete()
-
-
-class ReindexConsumer(SyncConsumer):
-
-    def reindex_start(self, message):
-        corpus_id, element_id, entity_id, transcriptions, elements, entities, drop = map(
-            message.get,
-            ('corpus', 'element', 'entity', 'transcriptions', 'elements', 'entities', 'drop'),
-            (None, None, None, True, True, True, False),  # Default values
-        )
-        indexer = Indexer()
-
-        if drop:
-            if transcriptions:
-                indexer.drop_index(Transcription.es_document)
-            if elements:
-                indexer.drop_index(Element.es_document)
-            if entities:
-                indexer.drop_index(Entity.es_document)
-            indexer.setup()
-
-        if entity_id:
-            # Pick entity only
-            transcriptions_queryset = Transcription.objects.none()
-            elements_queryset = Element.objects.none()
-            entities_queryset = Entity.objects.filter(id=entity_id)
-        elif element_id or corpus_id:
-            if element_id:
-                # Pick this element, and all its children
-                elements_queryset = Element.objects.filter(Q(id=element_id) | Q(paths__path__contains=[element_id]))
-            else:
-                # Pick all elements in the corpus
-                elements_queryset = Element.objects.filter(corpus_id=corpus_id)
-
-            transcriptions_queryset = Transcription.objects.filter(element__in=elements_queryset)
-            entities_queryset = Entity.objects.filter(
-                Q(metadatas__element__in=elements_queryset)
-                | Q(transcriptions__element__in=elements_queryset)
-            )
-        else:
-            transcriptions_queryset = Transcription.objects.all()
-            elements_queryset = Element.objects.all()
-            entities_queryset = Entity.objects.all()
-
-        if transcriptions:
-            indexer.run_index(transcriptions_queryset.select_related('element'), bulk_size=400)
-        if elements:
-            indexer.run_index(
-                elements_queryset.select_related('type').prefetch_related('metadatas', 'transcriptions'),
-                bulk_size=100,
-            )
-        if entities:
-            indexer.run_index(entities_queryset, bulk_size=400)
-
-
-class MLResultsConsumer(SyncConsumer):
-
-    def ml_results_delete(self, message):
-        corpus_id, element_id, batch_size = (
-            message.get('corpus_id'),
-            message.get('element_id'),
-            message.get('batch_size', 1000),
-        )
-        assert corpus_id or element_id, 'Missing element or corpus IDs'
-
-        if element_id:
-            logger.info('Deleting ML results on element {}'.format(element_id))
-            element = Element.objects.get(id=element_id)
-            if element.type.folder:
-                # The folder AND its children
-                elements = Element.objects.filter(id=element_id).values('id').union(
-                    # Disable ordering here because we do not need it and it adds an extra column,
-                    # causing the UNION to fail
-                    Element.objects.get_descending(element_id).order_by().values('id')
-                )
-            else:
-                elements = [element]
-
-            if not corpus_id:
-                # The corpus ID is still used in some deletions; deduce it from the element.
-                corpus_id = Element.objects.get(id=element_id).corpus_id
-        elif corpus_id:
-            logger.info('Deleting ML results on corpus {}'.format(corpus_id))
-            elements = Element.objects.filter(corpus_id=corpus_id)
-
-        # Simple deletions for classifications and transcriptions.
-        _delete_queryset(Classification.objects.filter(element__in=elements).exclude(source__slug='manual'), batch_size)
-        _delete_queryset(Transcription.objects.filter(element__in=elements).exclude(source__slug='manual'), batch_size)
-
-        # Entity deletion is complex: they can be linked to different elements both on transcriptions and metadata.
-        # Metadata are not considered ML results so we need to keep them: update them to unlink entities.
-        logger.info('Updating element metadata')
-        MetaData.objects.filter(element__in=elements).update(entity_id=None)
-
-        # We removed transcriptions earlier, which implies removing the links with entities.
-        # All is left is to remove 'lonely' entities.
-        # Note: __isnull's implementation will fetch all element IDs into a list before deleting—use batches!
-        _delete_queryset(Entity.objects.filter(
-            corpus_id=corpus_id,
-            metadatas__isnull=True,
-            transcriptions__isnull=True,
-        ), batch_size)
-
-        # Trigger a reindexation to clear up deleted results from search indexes
-        reindex_start(
-            element=element_id,
-            corpus=corpus_id,
-            transcriptions=True,
-            elements=True,
-            entities=True,
-        )
-
-
-class CorpusConsumer(SyncConsumer):
-
-    def corpus_delete(self, message):
-        corpus_id, batch_size = (
-            message.get('id'),
-            message.get('batch_size', 1000),
-        )
-
-        corpus = Corpus.objects.get(id=corpus_id)
-
-        logger.info('Deleting {!r}'.format(corpus))
-
-        # Delete all related objects, bypassing RESTRICTs deletion rules
-        # and Django's way of loading everything into memory before deleting.
-        querysets = [
-            DataImportElement.objects.filter(
-                Q(dataimport__in=corpus.imports.all())
-                | Q(element__in=corpus.elements.all())
-            ),
-            corpus.imports.all(),
-            corpus.files.all(),
-            EntityLink.objects.filter(role__corpus_id=corpus_id),
-            TranscriptionEntity.objects.filter(
-                Q(entity__in=corpus.entities.all())
-                | Q(transcription__in=Transcription.objects.filter(element__in=corpus.elements.all()))
-            ),
-            corpus.entities.all(),
-            corpus.roles.all(),
-            corpus.allowed_metadatas.all(),
-            MetaData.objects.filter(element__in=corpus.elements.all()),
-            Classification.objects.filter(element__in=corpus.elements.all()),
-            corpus.ml_classes.all(),
-            Transcription.objects.filter(element__in=corpus.elements.all()),
-            ElementPath.objects.filter(element__in=corpus.elements.all()),
-            Selection.objects.filter(element__in=corpus.elements.all()),
-            corpus.elements.all(),
-            corpus.types.all(),
-        ]
-
-        with disconnect_signal(pre_delete, sender=Element, receiver=pre_delete_handler):
-            for queryset in querysets:
-                _delete_queryset(queryset, batch_size)
-
-        corpus.delete()
-        logger.info('Deleted corpus {}'.format(corpus_id))
diff --git a/arkindex/documents/tasks.py b/arkindex/documents/tasks.py
new file mode 100644
index 0000000000000000000000000000000000000000..597739ac12ab19a97027dbd56c134c2ae72ee199
--- /dev/null
+++ b/arkindex/documents/tasks.py
@@ -0,0 +1,197 @@
+import logging
+from math import ceil
+from typing import Optional
+
+from django.db.models import Q
+from django.db.models.deletion import Collector
+from django.db.models.signals import pre_delete
+from django_rq import job
+
+from arkindex.dataimport.models import DataImportElement
+from arkindex.documents.indexer import Indexer
+from arkindex.documents.models import (
+    Classification,
+    Corpus,
+    Element,
+    ElementPath,
+    Entity,
+    EntityLink,
+    MetaData,
+    Selection,
+    Transcription,
+    TranscriptionEntity,
+)
+from arkindex.documents.signals import pre_delete_handler
+from arkindex.project.tools import disconnect_signal
+
+logger = logging.getLogger(__name__)
+
+
+def _delete_queryset(queryset, batch_size=1000):
+    """
+    Helper to delete large querysets with as few SQL queries and as small a memory footprint as possible.
+    """
+    count = queryset.count()
+    logger.info('Deleting {} {}'.format(count, queryset.model.__name__))
+
+    if not count:
+        return
+
+    if Collector(using=queryset.db).can_fast_delete(queryset.all()):
+        # If a single DELETE statement can be used,
+        # bypass both the batched deletion and Django's related objects checks.
+        logger.debug('Using single-query deletion')
+        queryset._raw_delete(using=queryset.db)
+        return
+
+    if count <= batch_size:
+        # If there is a single batch, just delete.
+        queryset.delete()
+        return
+
+    for i in range(ceil(count / batch_size)):
+        logger.debug('Deleting batch {}'.format(i + 1))
+        # Deleting a slice is not allowed;
+        # we use a sliced subquery instead and still delete in a single query.
+        # DELETE FROM … WHERE id IN (SELECT id FROM … LIMIT [batch_size])
+        ids = queryset[:batch_size].values('id')
+        queryset.model.objects.filter(id__in=ids).delete()
+
+
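+# Each task below is registered with django-rq's @job decorator,
+# which adds a .delay() method to enqueue it on the default queue.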
+@job
+def reindex_start(corpus_id: Optional[str] = None,
+                  element_id: Optional[str] = None,
+                  entity_id: Optional[str] = None,
+                  transcriptions: bool = True,
+                  elements: bool = True,
+                  entities: bool = True,
+                  drop: bool = False) -> None:
+    indexer = Indexer()
+
+    if drop:
+        if transcriptions:
+            indexer.drop_index(Transcription.es_document)
+        if elements:
+            indexer.drop_index(Element.es_document)
+        if entities:
+            indexer.drop_index(Entity.es_document)
+        indexer.setup()
+
+    if entity_id:
+        # Pick entity only
+        transcriptions_queryset = Transcription.objects.none()
+        elements_queryset = Element.objects.none()
+        entities_queryset = Entity.objects.filter(id=entity_id)
+    elif element_id or corpus_id:
+        if element_id:
+            # Pick this element, and all its children
+            elements_queryset = Element.objects.filter(Q(id=element_id) | Q(paths__path__contains=[element_id]))
+        else:
+            # Pick all elements in the corpus
+            elements_queryset = Element.objects.filter(corpus_id=corpus_id)
+
+        transcriptions_queryset = Transcription.objects.filter(element__in=elements_queryset)
+        entities_queryset = Entity.objects.filter(
+            Q(metadatas__element__in=elements_queryset)
+            | Q(transcriptions__element__in=elements_queryset)
+        )
+    else:
+        transcriptions_queryset = Transcription.objects.all()
+        elements_queryset = Element.objects.all()
+        entities_queryset = Entity.objects.all()
+
+    if transcriptions:
+        indexer.run_index(transcriptions_queryset.select_related('element'), bulk_size=400)
+    if elements:
+        indexer.run_index(
+            elements_queryset.select_related('type').prefetch_related('metadatas', 'transcriptions'),
+            bulk_size=100,
+        )
+    if entities:
+        indexer.run_index(entities_queryset, bulk_size=400)
+
+
+@job
+def ml_results_delete(corpus_id: Optional[str] = None,
+                      element_id: Optional[str] = None,
+                      batch_size: int = 1000) -> None:
+    assert corpus_id or element_id, 'Missing element or corpus IDs'
+
+    if element_id:
+        logger.info('Deleting ML results on element {}'.format(element_id))
+        element = Element.objects.get(id=element_id)
+        if element.type.folder:
+            # The folder AND its children
+            elements = Element.objects.filter(id=element_id).values('id').union(
+                # Disable ordering here because we do not need it and it adds an extra column,
+                # causing the UNION to fail
+                Element.objects.get_descending(element_id).order_by().values('id')
+            )
+        else:
+            elements = [element]
+
+        if not corpus_id:
+            # The corpus ID is still used in some deletions; deduce it from the element.
+            corpus_id = Element.objects.get(id=element_id).corpus_id
+    elif corpus_id:
+        logger.info('Deleting ML results on corpus {}'.format(corpus_id))
+        elements = Element.objects.filter(corpus_id=corpus_id)
+
+    # Simple deletions for classifications and transcriptions.
+    _delete_queryset(Classification.objects.filter(element__in=elements).exclude(source__slug='manual'), batch_size)
+    _delete_queryset(Transcription.objects.filter(element__in=elements).exclude(source__slug='manual'), batch_size)
+
+    # Entity deletion is complex: entities can be linked to different elements through both transcriptions and metadata.
+    # Metadata are not considered ML results, so we keep them and only update them to unlink entities.
+    logger.info('Updating element metadata')
+    MetaData.objects.filter(element__in=elements).update(entity_id=None)
+
+    # We removed transcriptions earlier, which implies removing their links with entities.
+    # All that is left is to remove the 'lonely' entities.
+    # Note: __isnull's implementation will fetch all element IDs into a list before deleting, so use batches!
+    _delete_queryset(Entity.objects.filter(
+        corpus_id=corpus_id,
+        metadatas__isnull=True,
+        transcriptions__isnull=True,
+    ), batch_size)
+
+
+@job
+def corpus_delete(corpus_id: str, batch_size: int = 1000) -> None:
+    corpus = Corpus.objects.get(id=corpus_id)
+
+    logger.info('Deleting {!r}'.format(corpus))
+
+    # Delete all related objects, bypassing RESTRICTs deletion rules
+    # and Django's way of loading everything into memory before deleting.
+    querysets = [
+        DataImportElement.objects.filter(
+            Q(dataimport__in=corpus.imports.all())
+            | Q(element__in=corpus.elements.all())
+        ),
+        corpus.imports.all(),
+        corpus.files.all(),
+        EntityLink.objects.filter(role__corpus_id=corpus_id),
+        TranscriptionEntity.objects.filter(
+            Q(entity__in=corpus.entities.all())
+            | Q(transcription__in=Transcription.objects.filter(element__in=corpus.elements.all()))
+        ),
+        corpus.entities.all(),
+        corpus.roles.all(),
+        corpus.allowed_metadatas.all(),
+        MetaData.objects.filter(element__in=corpus.elements.all()),
+        Classification.objects.filter(element__in=corpus.elements.all()),
+        corpus.ml_classes.all(),
+        Transcription.objects.filter(element__in=corpus.elements.all()),
+        ElementPath.objects.filter(element__in=corpus.elements.all()),
+        Selection.objects.filter(element__in=corpus.elements.all()),
+        corpus.elements.all(),
+        corpus.types.all(),
+    ]
+
+    with disconnect_signal(pre_delete, sender=Element, receiver=pre_delete_handler):
+        for queryset in querysets:
+            _delete_queryset(queryset, batch_size)
+
+    corpus.delete()
+    logger.info('Deleted corpus {}'.format(corpus_id))
diff --git a/arkindex/documents/tests/consumers/__init__.py b/arkindex/documents/tests/tasks/__init__.py
similarity index 100%
rename from arkindex/documents/tests/consumers/__init__.py
rename to arkindex/documents/tests/tasks/__init__.py
diff --git a/arkindex/documents/tests/consumers/test_corpus_consumer.py b/arkindex/documents/tests/tasks/test_corpus_delete.py
similarity index 97%
rename from arkindex/documents/tests/consumers/test_corpus_consumer.py
rename to arkindex/documents/tests/tasks/test_corpus_delete.py
index 619fa828a4e3eb4bd0df4dc2906abc202265c93d..5bf91a1d0a2adce3cf7697596ccf329693d471dd 100644
--- a/arkindex/documents/tests/consumers/test_corpus_consumer.py
+++ b/arkindex/documents/tests/tasks/test_corpus_delete.py
@@ -1,8 +1,8 @@
 from django.db.models.signals import pre_delete
 
 from arkindex.dataimport.models import Repository, RepositoryType
-from arkindex.documents.consumers import CorpusConsumer
 from arkindex.documents.models import Corpus, DataSource, Element, MLClass
+from arkindex.documents.tasks import corpus_delete
 from arkindex.project.tests import FixtureTestCase
 from arkindex_common.enums import DataImportMode, MetaType, TranscriptionType
 from arkindex_common.ml_tool import MLToolType
@@ -85,7 +85,7 @@ class TestDeleteCorpus(FixtureTestCase):
         self.corpus.save()
 
         with self.assertNumQueries(47):
-            CorpusConsumer({}).corpus_delete({'id': str(self.corpus.id)})
+            corpus_delete(self.corpus.id)
 
         # Ensure the command restores the signal receivers
         self.assertEqual(pre_delete.receivers, receivers)
diff --git a/arkindex/documents/tests/consumers/test_ml_results_consumer.py b/arkindex/documents/tests/tasks/test_ml_results_delete.py
similarity index 66%
rename from arkindex/documents/tests/consumers/test_ml_results_consumer.py
rename to arkindex/documents/tests/tasks/test_ml_results_delete.py
index 9d2af789418714cb054d932e586b8f9e9ea2f030..3fae2e6ef52560ec62444a6505dc07eb678d8beb 100644
--- a/arkindex/documents/tests/consumers/test_ml_results_consumer.py
+++ b/arkindex/documents/tests/tasks/test_ml_results_delete.py
@@ -1,16 +1,11 @@
-from unittest.mock import patch
-
-from django.test import override_settings
-from mock import AsyncMock
-
-from arkindex.documents.consumers import MLResultsConsumer
 from arkindex.documents.models import Classification, DataSource, Element, Entity, Transcription
+from arkindex.documents.tasks import ml_results_delete
 from arkindex.project.tests import FixtureTestCase
 from arkindex_common.enums import EntityType, MetaType, TranscriptionType
 from arkindex_common.ml_tool import MLToolType
 
 
-class TestMLResultsConsumer(FixtureTestCase):
+class TestMLResultsDelete(FixtureTestCase):
 
     @classmethod
     def setUpTestData(cls):
@@ -82,11 +77,9 @@ class TestMLResultsConsumer(FixtureTestCase):
 
     def test_delete_missing_parameters(self):
         with self.assertRaises(AssertionError):
-            MLResultsConsumer({}).ml_results_delete({})
+            ml_results_delete()
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_delete_corpus(self, get_layer_mock):
-        get_layer_mock.return_value.send = AsyncMock()
+    def test_delete_corpus(self):
         querysets = self._get_querysets(self.corpus.elements.all())
 
         for queryset in querysets:
@@ -96,7 +89,7 @@ class TestMLResultsConsumer(FixtureTestCase):
         self.assertEqual(self.page2.metadatas.count(), 2)
 
         with self.assertNumQueries(13):
-            MLResultsConsumer({}).ml_results_delete({'corpus_id': str(self.corpus.id)})
+            ml_results_delete(corpus_id=self.corpus.id)
 
         for queryset in querysets:
             self.assertFalse(queryset.exists())
@@ -110,29 +103,7 @@ class TestMLResultsConsumer(FixtureTestCase):
         self.assertEqual(metadata.type, MetaType.Text)
         self.assertIsNone(metadata.entity)
 
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'element': None,
-            'corpus': str(self.corpus.id),
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': True,
-            'drop': False,
-        })
-
-    @patch('arkindex.project.triggers.get_channel_layer')
-    @override_settings(ARKINDEX_FEATURES={'search': False})
-    def test_delete_corpus_no_search(self, get_layer_mock):
-        """
-        Assert deleting ML results for a corpus does not trigger an ES indexation without the search feature flag
-        """
-        MLResultsConsumer({}).ml_results_delete({'corpus_id': str(self.corpus.id)})
-        self.assertFalse(get_layer_mock().send.called)
-
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_delete_folder(self, get_layer_mock):
-        get_layer_mock.return_value.send = AsyncMock()
+    def test_delete_folder(self):
         folder1_querysets = self._get_querysets(
             Element.objects.filter(id=self.folder1.id).values('id').union(
                 Element.objects.get_descending(self.folder1.id).order_by().values('id')
@@ -150,7 +121,7 @@ class TestMLResultsConsumer(FixtureTestCase):
             self.assertTrue(queryset.exists())
 
         with self.assertNumQueries(16):
-            MLResultsConsumer({}).ml_results_delete({'element_id': str(self.folder1.id)})
+            ml_results_delete(element_id=self.folder1.id)
 
         for queryset in folder1_querysets:
             self.assertFalse(queryset.exists())
@@ -166,20 +137,7 @@ class TestMLResultsConsumer(FixtureTestCase):
         self.assertEqual(metadata.type, MetaType.Text)
         self.assertIsNotNone(metadata.entity)
 
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'element': str(self.folder1.id),
-            'corpus': str(self.corpus.id),
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': True,
-            'drop': False,
-        })
-
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_delete_page(self, get_layer_mock):
-        get_layer_mock.return_value.send = AsyncMock()
+    def test_delete_page(self):
         page1_querysets = self._get_querysets(
             Element.objects.filter(id=self.page1.id).values('id')
         )
@@ -195,7 +153,7 @@ class TestMLResultsConsumer(FixtureTestCase):
             self.assertTrue(queryset.exists())
 
         with self.assertNumQueries(16):
-            MLResultsConsumer({}).ml_results_delete({'element_id': str(self.page1.id)})
+            ml_results_delete(element_id=self.page1.id)
 
         for queryset in page1_querysets:
             self.assertFalse(queryset.exists())
@@ -210,23 +168,3 @@ class TestMLResultsConsumer(FixtureTestCase):
         metadata = self.page2.metadatas.get(value='Some entity 2')
         self.assertEqual(metadata.type, MetaType.Text)
         self.assertIsNotNone(metadata.entity)
-
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'element': str(self.page1.id),
-            'corpus': str(self.corpus.id),
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': True,
-            'drop': False,
-        })
-
-    @override_settings(ARKINDEX_FEATURES={'search': False})
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_delete_element_no_search(self, get_layer_mock):
-        """
-        Assert deleting ML results for an element does not trigger an ES indexation without the search feature flag
-        """
-        MLResultsConsumer({}).ml_results_delete({'element_id': str(self.folder1.id)})
-        self.assertFalse(get_layer_mock().send.called)
diff --git a/arkindex/documents/tests/consumers/test_reindex_consumer.py b/arkindex/documents/tests/tasks/test_reindex.py
similarity index 86%
rename from arkindex/documents/tests/consumers/test_reindex_consumer.py
rename to arkindex/documents/tests/tasks/test_reindex.py
index c91253ffb668177c0cf640523f333e1ec744e1b5..7a0be5ad990167714fc761412ce8329eee769c5d 100644
--- a/arkindex/documents/tests/consumers/test_reindex_consumer.py
+++ b/arkindex/documents/tests/tasks/test_reindex.py
@@ -3,15 +3,15 @@ from unittest.mock import patch
 from django.contrib.gis.geos import LinearRing
 from django.db.models import Q
 
-from arkindex.documents.consumers import ReindexConsumer
 from arkindex.documents.models import Corpus, DataSource, Element, Entity, Transcription
+from arkindex.documents.tasks import reindex_start
 from arkindex.project.tests import FixtureTestCase
 from arkindex_common.enums import EntityType, MetaType, TranscriptionType
 from arkindex_common.ml_tool import MLToolType
 
 
-@patch('arkindex.documents.consumers.Indexer')
-class TestReindexConsumer(FixtureTestCase):
+@patch('arkindex.documents.tasks.Indexer')
+class TestReindex(FixtureTestCase):
 
     @classmethod
     def setUpTestData(cls):
@@ -87,14 +87,12 @@ class TestReindexConsumer(FixtureTestCase):
         self._assert_all_transcriptions(ts_call)
 
     def test_reindex_all(self, mock):
-        ReindexConsumer({}).reindex_start({})
+        reindex_start()
         self.assertEqual(mock().drop_index.call_count, 0)
         self._assert_all(mock)
 
     def test_reindex_drop(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'drop': True,
-        })
+        reindex_start(drop=True)
         self.assertEqual(mock().drop_index.call_count, 3)
         mock().drop_index.assert_any_call(Element.es_document)
         mock().drop_index.assert_any_call(Entity.es_document)
@@ -102,39 +100,25 @@ class TestReindexConsumer(FixtureTestCase):
         self._assert_all(mock)
 
     def test_reindex_only_transcriptions(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'transcriptions': True,
-            'entities': False,
-            'elements': False,
-        })
+        reindex_start(transcriptions=True, entities=False, elements=False)
         self.assertEqual(mock().drop_index.call_count, 0)
         self.assertEqual(mock().run_index.call_count, 1)
         self._assert_all_transcriptions(mock().run_index.call_args)
 
     def test_reindex_only_elements(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'transcriptions': False,
-            'entities': False,
-            'elements': True,
-        })
+        reindex_start(transcriptions=False, entities=False, elements=True)
         self.assertEqual(mock().drop_index.call_count, 0)
         self.assertEqual(mock().run_index.call_count, 1)
         self._assert_all_elements(mock().run_index.call_args)
 
     def test_reindex_only_entities(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'transcriptions': False,
-            'entities': True,
-            'elements': False,
-        })
+        reindex_start(transcriptions=False, entities=True, elements=False)
         self.assertEqual(mock().drop_index.call_count, 0)
         self.assertEqual(mock().run_index.call_count, 1)
         self._assert_all_entities(mock().run_index.call_args)
 
     def test_reindex_corpus(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'corpus': str(self.corpus.id),
-        })
+        reindex_start(corpus_id=self.corpus.id)
         self.assertEqual(mock().drop_index.call_count, 0)
         self.assertEqual(mock().run_index.call_count, 3)
         elements_call, entities_call, ts_call = sorted(mock().run_index.call_args_list, key=repr)
@@ -155,9 +139,7 @@ class TestReindexConsumer(FixtureTestCase):
         self.assertDictEqual(kwargs, {'bulk_size': 400})
 
     def test_reindex_element(self, mock):
-        ReindexConsumer({}).reindex_start({
-            'element': str(self.folder.id),
-        })
+        reindex_start(element_id=self.folder.id)
         self.assertEqual(mock().drop_index.call_count, 0)
         self.assertEqual(mock().run_index.call_count, 3)
         elements_call, entities_call, ts_call = sorted(mock().run_index.call_args_list, key=repr)
diff --git a/arkindex/documents/tests/test_admin_api.py b/arkindex/documents/tests/test_admin_api.py
index aa8c3f2ab64089ac86ac2edeb0fd7be8294ff4c5..fc85a52001048930c2d9a58587332c8012de286a 100644
--- a/arkindex/documents/tests/test_admin_api.py
+++ b/arkindex/documents/tests/test_admin_api.py
@@ -1,8 +1,7 @@
-from unittest.mock import patch
+from unittest.mock import call, patch
 
 from django.test import override_settings
 from django.urls import reverse
-from mock import AsyncMock
 from rest_framework import status
 
 from arkindex.documents.models import Corpus
@@ -55,27 +54,22 @@ class TestAdminAPI(FixtureTestCase):
             'non_field_errors': ['The selected element is not in the selected corpus.'],
         })
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_reindex(self, get_layer_mock):
-        get_layer_mock.return_value.send = AsyncMock()
-
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_reindex(self, delay_mock):
         self.client.force_login(self.superuser)
         response = self.client.post(reverse('api:reindex-start'), {})
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
-        get_layer_mock.return_value.send.assert_called_once_with(
-            'reindex',
-            {
-                'element': None,
-                'corpus': None,
-                'entity': None,
-                'transcriptions': True,
-                'elements': True,
-                'entities': True,
-                'drop': False,
-                'type': 'reindex.start',
-            },
-        )
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=None,
+            corpus_id=None,
+            entity_id=None,
+            transcriptions=True,
+            elements=True,
+            entities=True,
+            drop=False,
+        ))
 
     @override_settings(ARKINDEX_FEATURES={'search': False})
     def test_reindex_no_search(self):
diff --git a/arkindex/documents/tests/test_bulk_element_transcriptions.py b/arkindex/documents/tests/test_bulk_element_transcriptions.py
index a7b94ec6404bb72ece4849e0989679cb5c7dd41c..22dbc75a724f6ae81f664e9e9e3b666144eece33 100644
--- a/arkindex/documents/tests/test_bulk_element_transcriptions.py
+++ b/arkindex/documents/tests/test_bulk_element_transcriptions.py
@@ -1,10 +1,9 @@
 import uuid
-from unittest.mock import patch
+from unittest.mock import call, patch
 
 from django.db.models import Count
 from django.test import override_settings
 from django.urls import reverse
-from mock import AsyncMock
 from rest_framework import status
 
 from arkindex.dataimport.models import WorkerVersion
@@ -28,13 +27,11 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
         cls.private_page = cls.private_corpus.elements.create(type=cls.page.type)
         cls.worker_version = WorkerVersion.objects.get(worker__slug='reco')
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_bulk_transcriptions(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_bulk_transcriptions(self, delay_mock):
         """
         Bulk creates a list of element with an attached transcription generated by a worker_version
         """
-        get_layer_mock.return_value.send = AsyncMock()
-
         # Create a manual transcription on the element
         self.line.transcriptions.create(
             text='A manual transcription',
@@ -87,16 +84,16 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
                 (TranscriptionType.Line, ('I <3 JavaScript'), None, self.worker_version.id)
             ]
         )
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'corpus': None,
-            'element': str(self.page.id),
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': False,
-            'drop': False,
-        })
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=str(self.page.id),
+            corpus_id=None,
+            entity_id=None,
+            transcriptions=True,
+            elements=True,
+            entities=False,
+            drop=False,
+        ))
 
     @override_settings(ARKINDEX_FEATURES={'search': False})
     def test_bulk_transcriptions_load(self):
@@ -137,14 +134,12 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
         # Each annotated element has a transcription
         self.assertEqual(created_elts.annotate(ts_count=Count('transcriptions')).filter(ts_count=1).count(), 100)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_bulk_transcriptions_similar_zone(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_bulk_transcriptions_similar_zone(self, delay_mock):
         """
         Pushing a transcription matching an element type and zone reuses this element
         Does not erase present transcriptions, even from the same worker
         """
-        get_layer_mock.return_value.send = AsyncMock()
-
         # Create a manual transcription on the element
         self.line.transcriptions.create(
             text='A manual transcription',
diff --git a/arkindex/documents/tests/test_corpus.py b/arkindex/documents/tests/test_corpus.py
index 27ea5fa446a36674fc47ac774154d5618a9b87f5..68058b3044ab79cf3ff951f99a82055538836f4e 100644
--- a/arkindex/documents/tests/test_corpus.py
+++ b/arkindex/documents/tests/test_corpus.py
@@ -1,10 +1,8 @@
 import datetime
-from unittest.mock import patch
+from unittest.mock import call, patch
 
-import mock
 from django.contrib.auth.models import AnonymousUser
 from django.urls import reverse
-from mock import AsyncMock
 from rest_framework import status
 
 from arkindex.documents.models import Corpus, Element, Right
@@ -63,11 +61,11 @@ class TestCorpus(FixtureAPITestCase):
         cls.anon = AnonymousUser()
 
         cls.corpus_public = Corpus.objects.get(name='Unit Tests', public=True)
-        with mock.patch('django.utils.timezone.now') as mock_now:
+        with patch('django.utils.timezone.now') as mock_now:
             mock_now.return_value = FAKE_NOW
             cls.corpus_private = Corpus.objects.create(name='B Private')
         cls.user.corpus_right.create(corpus=cls.corpus_private, user=cls.user, can_write=True)
-        with mock.patch('django.utils.timezone.now') as mock_now:
+        with patch('django.utils.timezone.now') as mock_now:
             mock_now.return_value = FAKE_NOW
             cls.corpus_hidden = Corpus.objects.create(name='C Hidden')
 
@@ -385,18 +383,14 @@ class TestCorpus(FixtureAPITestCase):
         response = self.client.delete(reverse('api:corpus-retrieve', kwargs={'pk': self.corpus_private.id}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_delete(self, get_layer_mock):
-        get_layer_mock.return_value.send = AsyncMock()
+    @patch('arkindex.project.triggers.tasks.corpus_delete.delay')
+    def test_delete(self, delay_mock):
         self.client.force_login(self.user)
         self.user.corpus_right.filter(corpus=self.corpus_private).update(can_admin=True)
         response = self.client.delete(reverse('api:corpus-retrieve', kwargs={'pk': self.corpus_private.id}))
         self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
-        get_layer_mock.return_value.send.assert_called_once_with(
-            'corpus',
-            {
-                'type': 'corpus.delete',
-                'id': str(self.corpus_private.id),
-                'batch_size': 1000,
-            }
-        )
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            corpus_id=str(self.corpus_private.id),
+            batch_size=1000,
+        ))
diff --git a/arkindex/documents/tests/test_create_transcriptions.py b/arkindex/documents/tests/test_create_transcriptions.py
index 80570f247072ee817616772f0a9ce359384e90c5..f8be8ede0536f8f57e4c7f929a285aa288885287 100644
--- a/arkindex/documents/tests/test_create_transcriptions.py
+++ b/arkindex/documents/tests/test_create_transcriptions.py
@@ -1,9 +1,8 @@
-from unittest.mock import patch
+from unittest.mock import call, patch
 from uuid import uuid4
 
 from django.test import override_settings
 from django.urls import reverse
-from mock import AsyncMock
 from rest_framework import status
 
 from arkindex.dataimport.models import WorkerVersion
@@ -50,8 +49,8 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         )
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_create_transcription_no_element(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_transcription_no_element(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.post(
             reverse('api:transcription-create', kwargs={'pk': uuid4()}),
@@ -59,14 +58,13 @@ class TestTranscriptionCreate(FixtureAPITestCase):
             data={'type': TranscriptionType.Word.value, 'text': 'NEKUDOTAYIM'}
         )
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
+        self.assertFalse(delay_mock.called)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_create_manual_transcription(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_manual_transcription(self, delay_mock):
         """
         Checks the view creates a manual transcription and runs ES indexing
         """
-        get_layer_mock.return_value.send = AsyncMock()
-
         self.client.force_login(self.user)
         with self.assertNumQueries(5):
             response = self.client.post(
@@ -90,24 +88,22 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         self.assertEqual(new_ts.worker_version, None)
         self.assertTrue(self.line.transcriptions.filter(pk=new_ts.id).exists())
 
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'element': str(self.line.id),
-            'corpus': None,
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': False,
-            'drop': False,
-        })
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=str(self.line.id),
+            corpus_id=None,
+            entity_id=None,
+            transcriptions=True,
+            elements=True,
+            entities=False,
+            drop=False,
+        ))
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_create_duplicated_transcription(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_duplicated_transcription(self, delay_mock):
         """
         Check the view creates a new manual transcriptions with a similar text and element
         """
-        get_layer_mock.return_value.send = AsyncMock()
-
         self.client.force_login(self.user)
         ts = self.line.transcriptions.create(text='GLOUBIBOULGA', type=TranscriptionType.Line.value)
         with self.assertNumQueries(5):
@@ -125,8 +121,8 @@ class TestTranscriptionCreate(FixtureAPITestCase):
             2
         )
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_create_transcription_wrong_type(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_transcription_wrong_type(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.post(
             reverse('api:transcription-create', kwargs={'pk': self.line.id}),
@@ -137,10 +133,11 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         self.assertDictEqual(response.json(), {
             'type': ['Value is not of type TranscriptionType']
         })
+        self.assertFalse(delay_mock.called)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     @override_settings(ARKINDEX_FEATURES={'search': False})
-    def test_create_transcription_no_search(self, get_layer_mock):
+    def test_create_transcription_no_search(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.post(
             reverse('api:transcription-create', kwargs={'pk': self.line.id}),
@@ -148,7 +145,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
             data={'type': TranscriptionType.Line.value, 'text': 'A classy text line'}
         )
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
-        self.assertFalse(get_layer_mock().send.called)
+        self.assertFalse(delay_mock.called)
 
     def test_create_transcription_worker_version_non_internal(self):
         """
@@ -172,13 +169,11 @@ class TestTranscriptionCreate(FixtureAPITestCase):
             ]
         })
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_create_transcription_worker_version(self, get_layer_mock):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_transcription_worker_version(self, delay_mock):
         """
         Creates a transcription with a worker version triggers its indexation on ElasticSearch
         """
-        get_layer_mock.return_value.send = AsyncMock()
-
         self.client.force_login(self.internal_user)
         response = self.client.post(
             reverse('api:transcription-create', kwargs={'pk': self.line.id}),
@@ -202,16 +197,16 @@ class TestTranscriptionCreate(FixtureAPITestCase):
             'worker_version_id': str(self.worker_version.id),
         })
 
-        get_layer_mock().send.assert_called_once_with('reindex', {
-            'type': 'reindex.start',
-            'element': str(self.line.id),
-            'corpus': None,
-            'entity': None,
-            'transcriptions': True,
-            'elements': True,
-            'entities': False,
-            'drop': False,
-        })
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=str(self.line.id),
+            corpus_id=None,
+            entity_id=None,
+            transcriptions=True,
+            elements=True,
+            entities=False,
+            drop=False,
+        ))
 
     def test_manual_transcription_forbidden_type(self):
         """
diff --git a/arkindex/documents/tests/test_entities_api.py b/arkindex/documents/tests/test_entities_api.py
index e081c5ce3c9f382597c2225915e4815a04784fd7..bc210f8a581746c6e712dd61bf1eb6cf4a9c04fc 100644
--- a/arkindex/documents/tests/test_entities_api.py
+++ b/arkindex/documents/tests/test_entities_api.py
@@ -1,5 +1,5 @@
 import uuid
-from unittest.mock import Mock, call, patch
+from unittest.mock import call, patch
 
 from django.contrib.gis.geos import LinearRing
 from django.test import override_settings
@@ -24,8 +24,6 @@ from arkindex.project.tests import FixtureAPITestCase
 from arkindex_common.enums import MetaType
 
 
-# Mock channels asynchronous triggered tasks
-@patch('arkindex.project.triggers.async_to_sync', Mock())
 class TestEntitiesAPI(FixtureAPITestCase):
 
     @classmethod
@@ -241,7 +239,8 @@ class TestEntitiesAPI(FixtureAPITestCase):
             'id': [str(self.corpus.id)]
         })
 
-    def test_create_entity_person(self):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_entity_person(self, delay_mock):
         self.entity_source.internal = True
         self.entity_source.save()
         data = {
@@ -261,8 +260,19 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.assertEqual(entity.name, 'entity')
         self.assertEqual(entity.raw_dates, None)
         self.assertEqual(entity.worker_version, self.worker_version)
-
-    def test_create_entity_number(self):
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=None,
+            entity_id=str(entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
+
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_entity_number(self, delay_mock):
         self.entity_source.internal = True
         self.entity_source.save()
         data = {
@@ -282,8 +292,19 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.assertEqual(entity.name, '300g')
         self.assertEqual(entity.raw_dates, None)
         self.assertEqual(entity.worker_version, self.worker_version)
-
-    def test_create_entity_date(self):
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=None,
+            entity_id=str(entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
+
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_entity_date(self, delay_mock):
         self.entity_source.internal = True
         self.entity_source.save()
         data = {
@@ -303,6 +324,16 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.assertEqual(entity.name, '1789')
         self.assertEqual(entity.raw_dates, entity.name)
         self.assertEqual(entity.worker_version, self.worker_version)
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=None,
+            entity_id=str(entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
 
     def test_create_entity_requires_login(self):
         data = {
@@ -318,7 +349,8 @@ class TestEntitiesAPI(FixtureAPITestCase):
         response = self.client.post(reverse('api:entity-create'), data=data, format='json')
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_create_entity_with_worker_version(self):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_create_entity_with_worker_version(self, delay_mock):
         data = {
             'name': '1789',
             'type': EntityType.Date.value,
@@ -336,6 +368,16 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.assertEqual(entity.name, '1789')
         self.assertEqual(entity.source, None)
         self.assertEqual(entity.worker_version, self.worker_version)
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=None,
+            entity_id=str(entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
 
     def test_create_link(self):
         child = Entity.objects.create(
@@ -744,7 +786,8 @@ class TestEntitiesAPI(FixtureAPITestCase):
         response = self.client.delete(reverse('api:entity-details', kwargs={'pk': str(self.entity_bis.id)}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    def test_validated_entity(self):
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_validated_entity(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.patch(
             reverse('api:entity-details', kwargs={'pk': self.entity_bis.id}),
@@ -754,8 +797,19 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.entity_bis.refresh_from_db()
         self.assertEqual(self.entity_bis.validated, True)
         self.assertEqual(self.entity_bis.moderator, self.user)
-
-    def test_unvalidated_entity(self):
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=None,
+            corpus_id=None,
+            entity_id=str(self.entity_bis.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
+
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_unvalidated_entity(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.patch(
             reverse('api:entity-details', kwargs={'pk': self.entity_bis.id}),
@@ -765,6 +819,16 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.entity_bis.refresh_from_db()
         self.assertEqual(self.entity_bis.validated, False)
         self.assertEqual(self.entity_bis.moderator, None)
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            element_id=None,
+            corpus_id=None,
+            entity_id=str(self.entity_bis.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
 
     def test_validated_entity_not_verified(self):
         response = self.client.patch(
@@ -838,7 +902,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         response = self.client.get(reverse('api:element-links', kwargs={'pk': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}))
         self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
 
-    @patch('arkindex.documents.api.entities.reindex_start')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     def test_entity_create_reindex(self, reindex_mock):
         """
         Created entities are indexed into ElasticSearch
@@ -854,12 +918,17 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(reindex_mock.call_count, 1)
         entity = Entity.objects.get(id=response.json()['id'])
-        self.assertEqual(
-            reindex_mock.call_args,
-            call(entity=entity, entities=True)
-        )
-
-    @patch('arkindex.documents.serializers.entities.reindex_start')
+        self.assertEqual(reindex_mock.call_args, call(
+            element_id=None,
+            corpus_id=None,
+            entity_id=str(entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
+
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     def test_entity_update_index(self, reindex_mock):
         self.client.force_login(self.user)
         response = self.client.patch(
@@ -870,10 +939,15 @@ class TestEntitiesAPI(FixtureAPITestCase):
         self.entity.refresh_from_db()
         self.assertEqual(self.entity.name, 'renamed')
         self.assertEqual(reindex_mock.call_count, 1)
-        self.assertEqual(
-            reindex_mock.call_args,
-            call(entity=self.entity, entities=True)
-        )
+        self.assertEqual(reindex_mock.call_args, call(
+            element_id=None,
+            corpus_id=None,
+            entity_id=str(self.entity.id),
+            elements=False,
+            transcriptions=False,
+            entities=True,
+            drop=False,
+        ))
 
     @patch('arkindex.documents.api.entities.ESEntity')
     def test_entity_delete_index(self, es_entity_mock):
diff --git a/arkindex/documents/tests/test_metadata.py b/arkindex/documents/tests/test_metadata.py
index 9212a3bbe2b1429c790f8a3dfc18450d661d1938..989e83bcf0b5b261b1cebb32fdcfccb88a2788c1 100644
--- a/arkindex/documents/tests/test_metadata.py
+++ b/arkindex/documents/tests/test_metadata.py
@@ -446,20 +446,20 @@ class TestMetaData(FixtureAPITestCase):
         ref_metadata.refresh_from_db()
         self.assertEqual(ref_metadata.type, MetaType.Reference)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     @override_settings(ARKINDEX_FEATURES={'search': False})
-    def test_reference_metadata_create_no_search(self, get_layer_mock):
+    def test_reference_metadata_create_no_search(self, delay_mock):
         self.client.force_login(self.user)
         response = self.client.post(
             reverse('api:element-metadata', kwargs={'pk': str(self.vol.id)}),
             data={'type': 'reference', 'name': '_id', 'value': '42'}
         )
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
-        self.assertFalse(get_layer_mock().send.called)
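+        # No reindex job should be enqueued when the search feature is disabled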
+        self.assertFalse(delay_mock.called)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     @override_settings(ARKINDEX_FEATURES={'search': False})
-    def test_reference_metadata_update_no_search(self, get_layer_mock):
+    def test_reference_metadata_update_no_search(self, delay_mock):
         self.client.force_login(self.superuser)
         ref_metadata = self.vol.metadatas.create(type=MetaType.Reference, name='_id', value='42')
         response = self.client.patch(
@@ -467,16 +467,16 @@ class TestMetaData(FixtureAPITestCase):
             data={'name': 'edition'},
         )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertFalse(get_layer_mock().send.called)
+        self.assertFalse(delay_mock.called)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
     @override_settings(ARKINDEX_FEATURES={'search': False})
-    def test_reference_metadata_delete_no_search(self, get_layer_mock):
+    def test_reference_metadata_delete_no_search(self, delay_mock):
         self.client.force_login(self.user)
         ref_metadata = self.vol.metadatas.create(type=MetaType.Reference, name='_id', value='42')
         response = self.client.delete(reverse('api:metadata-edit', kwargs={'pk': str(ref_metadata.id)}))
         self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
-        self.assertFalse(get_layer_mock().send.called)
+        self.assertFalse(delay_mock.called)
 
     def test_create_metadata_entity(self):
         self.client.force_login(self.superuser)
diff --git a/arkindex/documents/tests/test_ml_results.py b/arkindex/documents/tests/test_ml_results.py
index d16776edf1058b170573ae569108a6ec17ff534f..5cb96df53543aa809d7867f6d5b770c567e7aa17 100644
--- a/arkindex/documents/tests/test_ml_results.py
+++ b/arkindex/documents/tests/test_ml_results.py
@@ -1,7 +1,7 @@
-from unittest.mock import patch
+from unittest.mock import call, patch
 
+from django.test import override_settings
 from django.urls import reverse
-from mock import AsyncMock
 from rest_framework import status
 
 from arkindex.documents.models import DataSource, Entity, Transcription
@@ -135,18 +135,41 @@ class TestMLResults(FixtureTestCase):
         response = self.client.delete(reverse('api:corpus-ml-stats', kwargs={'pk': str(self.corpus.id)}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_corpus_destroy_results(self, get_layer_mock):
-        get_layer_mock().send = AsyncMock()
+    @patch('arkindex.project.triggers.tasks.ml_results_delete.delay')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_corpus_destroy_results(self, reindex_delay_mock, delete_delay_mock):
         self.client.force_login(self.superuser)
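+        # Whatever ml_results_delete.delay returns (the RQ job) should be
+        # forwarded to reindex_start.delay as its depends_on argument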
+        delete_delay_mock.return_value = 'a'
         response = self.client.delete(reverse('api:corpus-ml-stats', kwargs={'pk': str(self.corpus.id)}))
         self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
-        get_layer_mock().send.assert_called_once_with('ml_results', {
-            'type': 'ml_results.delete',
-            'corpus_id': str(self.corpus.id),
-            'element_id': None,
-            'batch_size': 1000,
-        })
+        self.assertEqual(delete_delay_mock.call_count, 1)
+        self.assertEqual(delete_delay_mock.call_args, call(
+            corpus_id=str(self.corpus.id),
+            element_id=None,
+            batch_size=1000,
+        ))
+        self.assertEqual(reindex_delay_mock.call_count, 1)
+        self.assertEqual(reindex_delay_mock.call_args, call(
+            corpus_id=str(self.corpus.id),
+            element_id=None,
+            depends_on='a',
+        ))
+
+    @override_settings(ARKINDEX_FEATURES={'search': False})
+    @patch('arkindex.project.triggers.tasks.ml_results_delete.delay')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_corpus_destroy_results_no_search(self, reindex_delay_mock, delete_delay_mock):
+        self.client.force_login(self.superuser)
+        delete_delay_mock.return_value = 'a'
+        response = self.client.delete(reverse('api:corpus-ml-stats', kwargs={'pk': str(self.corpus.id)}))
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertEqual(delete_delay_mock.call_count, 1)
+        self.assertEqual(delete_delay_mock.call_args, call(
+            corpus_id=str(self.corpus.id),
+            element_id=None,
+            batch_size=1000,
+        ))
+        self.assertFalse(reindex_delay_mock.called)
 
     def test_element_destroy_results_requires_login(self):
         response = self.client.delete(reverse('api:element-ml-stats', kwargs={'pk': str(self.page.id)}))
@@ -157,15 +180,38 @@ class TestMLResults(FixtureTestCase):
         response = self.client.delete(reverse('api:element-ml-stats', kwargs={'pk': str(self.page.id)}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
-    @patch('arkindex.project.triggers.get_channel_layer')
-    def test_element_destroy_results(self, get_layer_mock):
-        get_layer_mock().send = AsyncMock()
+    @patch('arkindex.project.triggers.tasks.ml_results_delete.delay')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_element_destroy_results(self, reindex_delay_mock, delete_delay_mock):
+        self.client.force_login(self.superuser)
+        delete_delay_mock.return_value = 'a'
+        response = self.client.delete(reverse('api:element-ml-stats', kwargs={'pk': str(self.page.id)}))
+        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+        self.assertEqual(delete_delay_mock.call_count, 1)
+        self.assertEqual(delete_delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=str(self.page.id),
+            batch_size=1000,
+        ))
+        self.assertEqual(reindex_delay_mock.call_count, 1)
+        self.assertEqual(reindex_delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=str(self.page.id),
+            depends_on='a',
+        ))
+
+    @override_settings(ARKINDEX_FEATURES={'search': False})
+    @patch('arkindex.project.triggers.tasks.ml_results_delete.delay')
+    @patch('arkindex.project.triggers.tasks.reindex_start.delay')
+    def test_element_destroy_results_no_search(self, reindex_delay_mock, delete_delay_mock):
         self.client.force_login(self.superuser)
+        delete_delay_mock.return_value = 'a'
         response = self.client.delete(reverse('api:element-ml-stats', kwargs={'pk': str(self.page.id)}))
         self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
-        get_layer_mock().send.assert_called_once_with('ml_results', {
-            'type': 'ml_results.delete',
-            'corpus_id': None,
-            'element_id': str(self.page.id),
-            'batch_size': 1000,
-        })
+        self.assertEqual(delete_delay_mock.call_count, 1)
+        self.assertEqual(delete_delay_mock.call_args, call(
+            corpus_id=None,
+            element_id=str(self.page.id),
+            batch_size=1000,
+        ))
+        self.assertFalse(reindex_delay_mock.called)
diff --git a/arkindex/project/asgi.py b/arkindex/project/asgi.py
deleted file mode 100644
index a188f2de8559daea020d4060d5d78d783489c860..0000000000000000000000000000000000000000
--- a/arkindex/project/asgi.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""
-ASGI entrypoint. Configures Django and then runs the application
-defined in the ASGI_APPLICATION setting.
-"""
-
-import os
-
-import django
-from channels.routing import get_default_application
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "arkindex.project.settings")
-os.environ['ALL_CHECKS'] = 'true'
-django.setup()
-
-# Enable sentry middleware, no integration is available when running ASGI
-application = SentryAsgiMiddleware(get_default_application())
diff --git a/arkindex/project/config.py b/arkindex/project/config.py
index ef5c64863107a57d232ae05860a3fbff034f8660..7ac6818b779b76cfb939d45a64fd96d7acad2384 100644
--- a/arkindex/project/config.py
+++ b/arkindex/project/config.py
@@ -83,7 +83,10 @@ def get_settings_parser(base_dir):
 
     redis_parser = parser.add_subparser('redis', default={})
     redis_parser.add_option('host', type=str, default='localhost')
-    redis_parser.add_option('capacity', type=int, default=1000)
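+    # Connection settings for the RQ queues; timeout is the default job timeout, in seconds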
+    redis_parser.add_option('port', type=int, default=6379)
+    redis_parser.add_option('db', type=int, default=0)
+    redis_parser.add_option('password', type=str, default=None)
+    redis_parser.add_option('timeout', type=int, default=1800)
 
     csrf_parser = parser.add_subparser('csrf', default={})
     csrf_parser.add_option('cookie_name', type=str, default='arkindex.csrf')
diff --git a/arkindex/project/consumers.py b/arkindex/project/consumers.py
deleted file mode 100644
index 45b64e7d5b47855ebbc08d8755ff2f3bf826f557..0000000000000000000000000000000000000000
--- a/arkindex/project/consumers.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from channels.exceptions import DenyConnection
-from channels.generic.websocket import AsyncJsonWebsocketConsumer
-
-
-class ConsumerPermission(object):
-    """
-    Base class for permission classes on Channels consumers.
-    """
-
-    def has_permission(self, consumer):
-        return True
-
-
-class IsAuthenticated(ConsumerPermission):
-    """
-    Restrict connections to authenticated users.
-    """
-
-    def has_permission(self, consumer):
-        if not consumer.user.is_authenticated:
-            return False
-        return super().has_permission(consumer)
-
-
-class IsAdmin(ConsumerPermission):
-    """
-    Restrict connections to admin users.
-    """
-
-    def has_permission(self, consumer):
-        if not consumer.user.is_authenticated or not consumer.user.is_admin:
-            return False
-        return super().has_permission(consumer)
-
-
-class AuthConsumer(AsyncJsonWebsocketConsumer):
-    permission_classes = ()
-
-    async def connect(self):
-        self.user = self.scope["user"]
-        for permission_class in self.permission_classes:
-            if not permission_class().has_permission(self):
-                # Raise instead of using self.close() because it allows subclasses
-                # to call await super().connect() and continue only when connections are accepted
-                raise DenyConnection
-        await self.accept()
diff --git a/arkindex/project/routing.py b/arkindex/project/routing.py
deleted file mode 100644
index 841bcbba29ae83860273de3c29dec91a73bbed70..0000000000000000000000000000000000000000
--- a/arkindex/project/routing.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from channels.auth import AuthMiddlewareStack
-from channels.routing import ChannelNameRouter, ProtocolTypeRouter, URLRouter
-from channels.security.websocket import AllowedHostsOriginValidator
-
-from arkindex.documents.consumers import CorpusConsumer, MLResultsConsumer, ReindexConsumer
-from arkindex.project.triggers import CORPUS_CHANNEL, ML_RESULTS_CHANNEL, REINDEX_CHANNEL
-
-application = ProtocolTypeRouter({
-    'websocket': AllowedHostsOriginValidator(
-        AuthMiddlewareStack(
-            URLRouter([
-            ]),
-        ),
-    ),
-    'channel': ChannelNameRouter({
-        REINDEX_CHANNEL: ReindexConsumer,
-        ML_RESULTS_CHANNEL: MLResultsConsumer,
-        CORPUS_CHANNEL: CorpusConsumer,
-    })
-})
diff --git a/arkindex/project/settings.py b/arkindex/project/settings.py
index 17898cf5dfe657b38cf1323bc1b45ec400ed7157..7e13be7000de0b8dfbd37f48e9704623b4b0ba65 100644
--- a/arkindex/project/settings.py
+++ b/arkindex/project/settings.py
@@ -97,10 +97,10 @@ INSTALLED_APPS = [
     'django_admin_hstore_widget',
 
     # Tools
-    'channels',
     'rest_framework',
     'rest_framework.authtoken',
     'corsheaders',
+    'django_rq',
     'ponos',
 
     # Our apps
@@ -289,17 +289,14 @@ elif conf['cache']['type'] == CacheType.Dummy:
         }
     }
 
-# Django Channels layer using Redis
-CHANNEL_LAYERS = {
-    "default": {
-        "BACKEND": "channels_redis.core.RedisChannelLayer",
-        "CONFIG": {
-            "hosts": [
-                (conf['redis']['host'], 6379)
-            ],
-            "capacity": conf['redis']['capacity'],
-        },
-    },
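+# RQ queues configuration, read by django_rq; a single default queue
+# replaces the Channels layer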
+RQ_QUEUES = {
+    'default': {
+        'HOST': conf['redis']['host'],
+        'PORT': conf['redis']['port'],
+        'DB': conf['redis']['db'],
+        'PASSWORD': conf['redis']['password'],
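+        # Default timeout for jobs on this queue, in seconds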
+        'DEFAULT_TIMEOUT': conf['redis']['timeout'],
+    }
 }
 
 LOGGING = {
@@ -332,11 +329,7 @@ LOGGING = {
             'level': 'INFO',
             'propagate': True,
         },
-        'daphne.server': {
-            'handlers': ['console_debug'],
-            'level': 'INFO',
-        },
-        'arkindex.documents.consumers': {
+        'arkindex.documents.tasks': {
             'handlers': ['console'],
             'level': 'INFO',
         },
@@ -474,6 +467,10 @@ if TEST_ENV:
     PONOS_PRIVATE_KEY = None
     LOCAL_IMAGESERVER_ID = 1
 
+    # Run RQ jobs synchronously on the main thread, so tests do not need a worker
+    for queue in RQ_QUEUES.values():
+        queue['ASYNC'] = False
+
     # Turn Django's UnorderedObjectListWarning into exceptions
     warnings.filterwarnings('error', category=RuntimeWarning, module='django.core.paginator')
     warnings.filterwarnings('error', category=RuntimeWarning, module='rest_framework.pagination')
diff --git a/arkindex/project/tests/config_samples/defaults.yaml b/arkindex/project/tests/config_samples/defaults.yaml
index 7fb73a3e936666c916b2be2503544786da78d3ea..55c3c3274587df20cb64a1df37d22d6310392c3f 100644
--- a/arkindex/project/tests/config_samples/defaults.yaml
+++ b/arkindex/project/tests/config_samples/defaults.yaml
@@ -46,8 +46,11 @@ ponos:
   default_env: {}
   private_key: /somewhere/backend/arkindex/ponos.key
 redis:
-  capacity: 1000
+  db: 0
   host: localhost
+  password: null
+  port: 6379
+  timeout: 1800
 s3:
   access_key_id: null
   endpoint: null
diff --git a/arkindex/project/tests/config_samples/errors.yaml b/arkindex/project/tests/config_samples/errors.yaml
index d313e474f0a8bdbc129b8ba8936734c607bd7ec8..b9406ee49a059a8e50af5246aae5b18a59bca8bc 100644
--- a/arkindex/project/tests/config_samples/errors.yaml
+++ b/arkindex/project/tests/config_samples/errors.yaml
@@ -36,8 +36,11 @@ ponos:
   default_env: {}
   private_key: /dev/zero
 redis:
-  capacity: over nine thousand
   host: radish
+  port: over nine thousand
+  db: idk
+  password: yes
+  timeout: sauce
 s3:
   endpoint: null
   ponos_artifacts_bucket: {}
diff --git a/arkindex/project/tests/config_samples/expected_errors.yaml b/arkindex/project/tests/config_samples/expected_errors.yaml
index ac8c877184e80c8b627a17f736d6f456d6133a8f..afaf81909ddfa6dcd7f86a6799402fc3470de8a6 100644
--- a/arkindex/project/tests/config_samples/expected_errors.yaml
+++ b/arkindex/project/tests/config_samples/expected_errors.yaml
@@ -14,7 +14,9 @@ email:
 features:
   sv_cheats: This option does not exist
 redis:
-  capacity: "invalid literal for int() with base 10: 'over nine thousand'"
+  port: "invalid literal for int() with base 10: 'over nine thousand'"
+  db: "invalid literal for int() with base 10: 'idk'"
+  timeout: "invalid literal for int() with base 10: 'sauce'"
 session:
   cookie_samesite: "'foo' is not a valid CookieSameSiteOption"
 static:
diff --git a/arkindex/project/tests/config_samples/override.yaml b/arkindex/project/tests/config_samples/override.yaml
index 910f198ac65176642657c993a6f611d5df7f122f..fcadbc530ceedfd106cbd14edb82bd5b6bbf034e 100644
--- a/arkindex/project/tests/config_samples/override.yaml
+++ b/arkindex/project/tests/config_samples/override.yaml
@@ -61,8 +61,11 @@ ponos:
     A: B
   private_key: /a/b/c
 redis:
-  capacity: 9001
+  db: 42
   host: radish
+  password: massword
+  port: 9001
+  timeout: 1337
 s3:
   access_key_id: abcd
   endpoint: http://somewhere
diff --git a/arkindex/project/triggers.py b/arkindex/project/triggers.py
index 2b69e9a92137665cdec9eaca5995c3635c46b47f..933f33e989093a3ae33639093db5996f5c3e39ef 100644
--- a/arkindex/project/triggers.py
+++ b/arkindex/project/triggers.py
@@ -4,20 +4,11 @@ Helper methods to trigger tasks in asynchronous workers
 from typing import Union
 from uuid import UUID
 
-from asgiref.sync import async_to_sync
-from channels.layers import get_channel_layer
 from django.conf import settings
 
+from arkindex.documents import tasks
 from arkindex.documents.models import Corpus, Element, Entity
 
-REINDEX_CHANNEL = 'reindex'
-ML_RESULTS_CHANNEL = 'ml_results'
-CORPUS_CHANNEL = 'corpus'
-
-ACTION_REINDEX_START = 'reindex.start'
-ACTION_ML_RESULTS_DELETE = 'ml_results.delete'
-ACTION_CORPUS_DELETE = 'corpus.delete'
-
 
 def reindex_start(*,
                   corpus: Union[Corpus, UUID, str] = None,
@@ -61,16 +52,15 @@ def reindex_start(*,
     elif entity:
         entity_id = str(entity)
 
-    async_to_sync(get_channel_layer().send)(REINDEX_CHANNEL, {
-        'type': ACTION_REINDEX_START,
-        'element': element_id,
-        'corpus': corpus_id,
-        'entity': entity_id,
-        'transcriptions': transcriptions,
-        'elements': elements,
-        'entities': entities,
-        'drop': drop,
-    })
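+    # Enqueue the reindexation on the default RQ queue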
+    tasks.reindex_start.delay(
+        element_id=element_id,
+        corpus_id=corpus_id,
+        entity_id=entity_id,
+        transcriptions=transcriptions,
+        elements=elements,
+        entities=entities,
+        drop=drop,
+    )
 
 
 def ml_results_delete(*,
@@ -95,12 +85,10 @@ def ml_results_delete(*,
 
     assert element_id or corpus_id, 'Missing element or corpus ID'
 
-    async_to_sync(get_channel_layer().send)(ML_RESULTS_CHANNEL, {
-        'type': ACTION_ML_RESULTS_DELETE,
-        'element_id': element_id,
-        'corpus_id': corpus_id,
-        'batch_size': batch_size,
-    })
+    job = tasks.ml_results_delete.delay(corpus_id=corpus_id, element_id=element_id, batch_size=batch_size)
+    if settings.ARKINDEX_FEATURES['search']:
+        # Trigger a reindex afterwards to clean up the deleted results
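+        # rq's @job decorator pops depends_on from the kwargs itself, so the
+        # reindex job will only start once the deletion job has finished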
+        tasks.reindex_start.delay(corpus_id=corpus_id, element_id=element_id, depends_on=job)
 
 
 def corpus_delete(corpus: Union[Corpus, UUID, str],
@@ -115,8 +103,4 @@ def corpus_delete(corpus: Union[Corpus, UUID, str],
     else:
         corpus_id = str(corpus)
 
-    async_to_sync(get_channel_layer().send)(CORPUS_CHANNEL, {
-        'type': ACTION_CORPUS_DELETE,
-        'id': corpus_id,
-        'batch_size': batch_size,
-    })
+    tasks.corpus_delete.delay(corpus_id=corpus_id, batch_size=batch_size)
diff --git a/arkindex/project/urls.py b/arkindex/project/urls.py
index 3bae5bdfff655dbb457d738262091ed5d54c8c3f..84b4803a3bf3a468bdfd54a7a1bf34ec98a75363 100644
--- a/arkindex/project/urls.py
+++ b/arkindex/project/urls.py
@@ -16,6 +16,7 @@ urlpatterns = [
     path('ponos/v1/secret/<path:name>', PonosSecretDetails.as_view(), name='secret-details'),
     path('ponos/', include('ponos.urls')),
     path('admin/', admin.site.urls),
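+    # Staff-only django_rq dashboard to monitor queues, workers and failed jobs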
+    path('rq/', include('django_rq.urls')),
     # Frontend URLs the backend needs with django.urls.reverse
     # Link sent via email for password resets
     path('user/reset/<uidb64>/<token>/', frontend_view.as_view(), name='password_reset_confirm'),
diff --git a/base/requirements.txt b/base/requirements.txt
index f8a1a4d14cd6d2c1826c65f86185ed73d44bae52..7f79b7fa60eef8c1b8fefcb8f95d92ec69bf9897 100644
--- a/base/requirements.txt
+++ b/base/requirements.txt
@@ -2,7 +2,5 @@ boto3==1.15.16
 cryptography>=3.1
 Django==3.1.2
 elasticsearch==6.8.1
-hiredis==1.1.0
 lxml==4.5.2
 psycopg2-binary==2.8.6
-Twisted==19.7.0
diff --git a/requirements.txt b/requirements.txt
index 90f9b5542723607dfabaf8c3929eaea24535b61e..6889b22acf69e307cbb677308f17d22ad7cf8b58 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,12 +4,11 @@ apistar==0.7.2
 git+https://gitlab.com/teklia/apistar.git#egg=apistar
 arkindex-common==0.2.0
 git+https://gitlab.com/arkindex/common.git#egg=arkindex-common
-channels==2.4.0
-channels-redis==2.4.2
 django-admin-hstore-widget==1.1.0
 django-cors-headers==3.5.0
 django-enumfields==2.0.0
 django-redis==4.12.1
+django-rq==2.3.2
 djangorestframework==3.11.1
 elasticsearch-dsl>=6.0.0,<7.0.0
 gitpython==3.1.9
diff --git a/tests-requirements.txt b/tests-requirements.txt
index 3440f042d961b835b8ac0af61216e9f63d299856..bf028cc636308eea8f383790d20a9f339123bb93 100644
--- a/tests-requirements.txt
+++ b/tests-requirements.txt
@@ -1,5 +1,4 @@
 coverage==5.3
 django-nose==1.4.7
-mock==4.0.2
 responses==0.12.0
 tripoli==2.0.0