From 16ad1d9217f2bb417c2d6e80bd1dd662c74893e4 Mon Sep 17 00:00:00 2001
From: Erwan Rouchet <rouchet@teklia.com>
Date: Tue, 27 Jun 2023 13:03:05 +0000
Subject: [PATCH] Slay the Workflow

Remove the Ponos Workflow model and fold its data into process.Process:
tasks now reference their process directly (ponos 0002_task_process),
Process gains farm, started and finished fields
(process 0004_process_farm_started_finished), existing workflows are
migrated over (process 0005_migrate_workflows), and the workflow column
and model are then dropped (process 0006_remove_process_workflow,
ponos 0003_remove_workflow). Fixtures and tests are rebuilt accordingly.

---
 arkindex/documents/fixtures/data.json         | 1865 +++++++++--------
 .../management/commands/build_fixtures.py     |   18 +-
 .../documents/management/commands/cleanup.py  |   87 +-
 arkindex/documents/tasks.py                   |    2 +
 .../documents/tests/commands/test_cleanup.py  |  291 +--
 .../tests/test_bulk_classification.py         |    6 +-
 .../tests/test_bulk_element_transcriptions.py |    6 +-
 .../documents/tests/test_bulk_elements.py     |    6 +-
 .../tests/test_bulk_transcription_entities.py |    6 +-
 .../tests/test_bulk_transcriptions.py         |    6 +-
 .../documents/tests/test_classification.py    |    6 +-
 .../documents/tests/test_create_elements.py   |    6 +-
 .../tests/test_create_transcriptions.py       |    6 +-
 arkindex/documents/tests/test_entities_api.py |   12 +-
 arkindex/documents/tests/test_metadata.py     |   10 +-
 .../documents/tests/test_retrieve_elements.py |    2 +-
 arkindex/images/tests/test_image_api.py       |    4 +-
 arkindex/ponos/admin.py                       |   46 +-
 arkindex/ponos/api.py                         |   69 +-
 arkindex/ponos/authentication.py              |   17 +-
 arkindex/ponos/migrations/0001_initial.py     |    4 +-
 .../ponos/migrations/0002_task_process.py     |   57 +
 .../ponos/migrations/0003_remove_workflow.py  |   31 +
 arkindex/ponos/models.py                      |  247 +--
 arkindex/ponos/permissions.py                 |   10 +-
 arkindex/ponos/serializers.py                 |   80 +-
 arkindex/ponos/tests/test_api.py              |  476 ++---
 arkindex/ponos/tests/test_models.py           |   66 +-
 .../ponos/tests/test_tasks_attribution.py     |   37 +-
 arkindex/ponos/tests/test_workflow.py         |  135 --
 arkindex/ponos/utils.py                       |    2 +-
 arkindex/ponos/validators.py                  |   28 -
 arkindex/process/admin.py                     |    8 +-
 arkindex/process/api.py                       |  329 +--
 .../0004_process_farm_started_finished.py     |   51 +
 .../migrations/0005_migrate_workflows.py      |  117 ++
 .../0006_remove_process_workflow.py           |   21 +
 arkindex/process/models.py                    |  287 ++-
 arkindex/process/serializers/imports.py       |  170 +-
 arkindex/process/serializers/workers.py       |    2 +-
 arkindex/process/signals.py                   |    2 +-
 .../process/tests/test_create_s3_import.py    |   10 +-
 .../tests/test_create_training_process.py     |   24 +-
 arkindex/process/tests/test_datafile_api.py   |    2 +-
 ...kflows_api.py => test_element_workflow.py} |   85 +-
 .../process/tests/test_gitlab_provider.py     |    3 +-
 .../process/tests/test_process_elements.py    |    2 +-
 arkindex/process/tests/test_processes.py      | 1115 +++++++---
 arkindex/process/tests/test_repos.py          |   15 +-
 arkindex/process/tests/test_signals.py        |    4 +-
 arkindex/process/tests/test_templates.py      |   10 +-
 .../process/tests/test_transkribus_import.py  |    9 +-
 arkindex/process/tests/test_workeractivity.py |    4 +-
 arkindex/process/tests/test_workerruns.py     |  145 +-
 arkindex/process/tests/test_workers.py        |   16 +-
 arkindex/project/api_v1.py                    |    6 +-
 arkindex/project/tests/test_ponos_view.py     |   75 +-
 arkindex/project/validators.py                |   28 +
 arkindex/sql_validation/corpus_delete.sql     |   10 +-
 .../corpus_delete_top_level_type.sql          |   10 +-
 .../process_elements_filter_type.sql          |    4 +-
 .../process_elements_top_level.sql            |    4 +-
 .../process_elements_with_image.sql           |    4 +-
 arkindex/training/tests/test_datasets_api.py  |    3 +-
 arkindex/users/models.py                      |    2 +-
 65 files changed, 3295 insertions(+), 2926 deletions(-)
 create mode 100644 arkindex/ponos/migrations/0002_task_process.py
 create mode 100644 arkindex/ponos/migrations/0003_remove_workflow.py
 delete mode 100644 arkindex/ponos/tests/test_workflow.py
 delete mode 100644 arkindex/ponos/validators.py
 create mode 100644 arkindex/process/migrations/0004_process_farm_started_finished.py
 create mode 100644 arkindex/process/migrations/0005_migrate_workflows.py
 create mode 100644 arkindex/process/migrations/0006_remove_process_workflow.py
 rename arkindex/process/tests/{test_workflows_api.py => test_element_workflow.py} (92%)

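Note for reviewers (this text sits above the first diff and is ignored by
git am): the fixture churn below is mechanical — every primary key changes,
presumably because build_fixtures regenerates the whole dataset — and the
substantive change is the workflow-to-process data migration. As context, a
minimal sketch of the kind of step process/migrations/0005_migrate_workflows.py
has to perform follows. It is hypothetical: the real migration is not part of
this excerpt, and how started/finished are derived is an assumption.

from django.db import migrations


def migrate_workflows(apps, schema_editor):
    # Copy each legacy Workflow's farm onto its Process before the workflow
    # column and model are dropped by the follow-up migrations.
    Process = apps.get_model("process", "Process")
    # Process.workflow still exists at this point; it is only removed in
    # 0006_remove_process_workflow.
    for process in Process.objects.exclude(workflow=None).select_related("workflow"):
        process.farm_id = process.workflow.farm_id
        # The real migration also populates started/finished; how it derives
        # them from the workflow's tasks is not visible in this excerpt.
        process.save(update_fields=["farm"])


class Migration(migrations.Migration):

    dependencies = [
        ("process", "0004_process_farm_started_finished"),
    ]

    operations = [
        migrations.RunPython(migrate_workflows, migrations.RunPython.noop),
    ]

A per-row loop is used because Django's QuerySet.update() cannot reference
joined fields such as workflow__farm_id; a noop reverse is acceptable since
the later deletion of Workflow makes true reversal impossible anyway.
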
diff --git a/arkindex/documents/fixtures/data.json b/arkindex/documents/fixtures/data.json
index 34f28a1d1c..82bb0b5aed 100644
--- a/arkindex/documents/fixtures/data.json
+++ b/arkindex/documents/fixtures/data.json
@@ -1,17 +1,19 @@
 [
 {
     "model": "process.process",
-    "pk": "238374b2-2923-4681-8a28-815243c0d7a7",
+    "pk": "258f38c2-7903-4a43-b7f9-4433dba57c80",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "name": null,
-        "creator": 1,
+        "creator": 2,
         "corpus": null,
         "mode": "local",
         "revision": null,
-        "workflow": null,
         "activity_state": "disabled",
+        "started": null,
+        "finished": null,
+        "farm": "2baf9bee-0b5e-479d-bf68-c0cea79b025f",
         "element": null,
         "folder_type": null,
         "element_type": null,
@@ -32,17 +34,19 @@
 },
 {
     "model": "process.process",
-    "pk": "70c59b0c-4172-4516-bd2a-615cd8b1bbba",
+    "pk": "2faf86ae-d27d-4b1a-b790-1d2067656c71",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "name": "Process fixture",
         "creator": 2,
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
         "mode": "workers",
         "revision": null,
-        "workflow": null,
         "activity_state": "disabled",
+        "started": null,
+        "finished": null,
+        "farm": "2baf9bee-0b5e-479d-bf68-c0cea79b025f",
         "element": null,
         "folder_type": null,
         "element_type": null,
@@ -63,17 +67,52 @@
 },
 {
     "model": "process.process",
-    "pk": "72423407-07b4-4736-9a68-af20a4d3e6b2",
+    "pk": "49ca1236-f0ca-4e5c-a733-0172663be1b4",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "name": null,
-        "creator": 2,
+        "creator": 1,
+        "corpus": null,
+        "mode": "repository",
+        "revision": null,
+        "activity_state": "disabled",
+        "started": null,
+        "finished": null,
+        "farm": "6dfc651f-c7c2-4e50-9306-c9de3ceeb7b4",
+        "element": null,
+        "folder_type": null,
+        "element_type": null,
+        "name_contains": null,
+        "load_children": false,
+        "collection_id": null,
+        "use_cache": false,
+        "use_gpu": false,
+        "template": null,
+        "bucket_name": null,
+        "prefix": null,
+        "model": null,
+        "train_folder": null,
+        "validation_folder": null,
+        "test_folder": null,
+        "files": []
+    }
+},
+{
+    "model": "process.process",
+    "pk": "aa1cc3fd-bdb2-45ce-84a8-b5ef0fb80dc0",
+    "fields": {
+        "created": "2020-02-02T01:23:45.678Z",
+        "updated": "2020-02-02T01:23:45.678Z",
+        "name": null,
+        "creator": 1,
         "corpus": null,
         "mode": "local",
         "revision": null,
-        "workflow": null,
         "activity_state": "disabled",
+        "started": null,
+        "finished": null,
+        "farm": "2baf9bee-0b5e-479d-bf68-c0cea79b025f",
         "element": null,
         "folder_type": null,
         "element_type": null,
@@ -94,29 +133,29 @@
 },
 {
     "model": "process.repository",
-    "pk": "2783bfae-a918-4807-b230-3daf7a3b64df",
+    "pk": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
     "fields": {
         "url": "http://my_repo.fake/workers/worker",
         "hook_token": "worker-hook-token",
-        "credentials": "0847cf6f-d9d0-4044-9760-8139e2c3a91b"
+        "credentials": "1332d2da-50bc-49ab-9084-e75327d40fa1"
     }
 },
 {
     "model": "process.repository",
-    "pk": "863afa60-83b9-48e1-9ad2-a3273cc580fd",
+    "pk": "d4c6aad8-c8c3-4ebe-8e8d-c689823b601a",
     "fields": {
         "url": "http://gitlab/repo",
         "hook_token": "hook-token",
-        "credentials": "0847cf6f-d9d0-4044-9760-8139e2c3a91b"
+        "credentials": "1332d2da-50bc-49ab-9084-e75327d40fa1"
     }
 },
 {
     "model": "process.revision",
-    "pk": "31cfe573-224a-40ab-baab-dcdac7723aae",
+    "pk": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "repo": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "repo": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "hash": "1337",
         "message": "My w0rk3r",
         "author": "Test user"
@@ -124,11 +163,11 @@
 },
 {
     "model": "process.revision",
-    "pk": "358d8bea-20d0-446b-a6ec-e8362be06b94",
+    "pk": "b9fcad70-d103-4527-b277-0c6c4cfb9d3b",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "repo": "863afa60-83b9-48e1-9ad2-a3273cc580fd",
+        "repo": "d4c6aad8-c8c3-4ebe-8e8d-c689823b601a",
         "hash": "42",
         "message": "Salve",
         "author": "Some user"
@@ -136,62 +175,62 @@
 },
 {
     "model": "process.worker",
-    "pk": "05431e16-035c-4ef9-ba1e-98bd55e94be3",
+    "pk": "35d0c448-307a-4b58-b2f1-d4d954281db6",
     "fields": {
-        "name": "File import",
-        "slug": "file_import",
-        "type": "d97dcfa2-80a4-4628-8ef4-41df6e265ca6",
-        "repository": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "name": "Worker requiring a GPU",
+        "slug": "worker-gpu",
+        "type": "d4e66eaa-f4d8-4d8a-8aeb-402f56b07572",
+        "repository": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "public": false
     }
 },
 {
     "model": "process.worker",
-    "pk": "59e36945-eca1-4686-a9e6-f279e11ba2f3",
+    "pk": "595930b6-8802-4337-b8b0-89f2b34421bd",
     "fields": {
-        "name": "Generic worker with a Model",
-        "slug": "generic",
-        "type": "339b2fe7-786d-4c81-8fdf-fd4a59da85a2",
-        "repository": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "name": "File import",
+        "slug": "file_import",
+        "type": "48e3b6f3-93f6-457d-894c-ae02ee64d7ba",
+        "repository": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "public": false
     }
 },
 {
     "model": "process.worker",
-    "pk": "682ef63b-e42b-4049-aee3-0a8671bc5562",
+    "pk": "a3d5a9f9-cd37-4de3-8029-040d1ad120fc",
     "fields": {
-        "name": "Worker requiring a GPU",
-        "slug": "worker-gpu",
-        "type": "5d22605c-d49d-475b-b40e-7f2618f9a5ac",
-        "repository": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "name": "Document layout analyser",
+        "slug": "dla",
+        "type": "b1d40fe5-76d2-495c-8f7b-bad52c18d0c4",
+        "repository": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "public": false
     }
 },
 {
     "model": "process.worker",
-    "pk": "6ae414e7-dec6-44c7-92cb-63e2d1e8c73a",
+    "pk": "c2b5ac86-4d83-4594-96df-f3acf2a2be6e",
     "fields": {
-        "name": "Document layout analyser",
-        "slug": "dla",
-        "type": "b4751154-cb33-4f51-b7cc-cd4e6063290c",
-        "repository": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "name": "Generic worker with a Model",
+        "slug": "generic",
+        "type": "46571a1c-33a4-4fd7-963e-dad72026372b",
+        "repository": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "public": false
     }
 },
 {
     "model": "process.worker",
-    "pk": "fa9d5344-b669-464f-89ab-c9313d5d3778",
+    "pk": "d0fdbd79-5e5f-441a-821f-1a0d69e601eb",
     "fields": {
         "name": "Recognizer",
         "slug": "reco",
-        "type": "339b2fe7-786d-4c81-8fdf-fd4a59da85a2",
-        "repository": "2783bfae-a918-4807-b230-3daf7a3b64df",
+        "type": "46571a1c-33a4-4fd7-963e-dad72026372b",
+        "repository": "3ff3363d-643d-4995-bb32-c25118fcf1e5",
         "public": false
     }
 },
 {
     "model": "process.workertype",
-    "pk": "339b2fe7-786d-4c81-8fdf-fd4a59da85a2",
+    "pk": "46571a1c-33a4-4fd7-963e-dad72026372b",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
@@ -201,17 +240,17 @@
 },
 {
     "model": "process.workertype",
-    "pk": "5d22605c-d49d-475b-b40e-7f2618f9a5ac",
+    "pk": "48e3b6f3-93f6-457d-894c-ae02ee64d7ba",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "slug": "worker",
-        "display_name": "Worker requiring a GPU"
+        "slug": "import",
+        "display_name": "Import"
     }
 },
 {
     "model": "process.workertype",
-    "pk": "b4751154-cb33-4f51-b7cc-cd4e6063290c",
+    "pk": "b1d40fe5-76d2-495c-8f7b-bad52c18d0c4",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
@@ -221,143 +260,143 @@
 },
 {
     "model": "process.workertype",
-    "pk": "d97dcfa2-80a4-4628-8ef4-41df6e265ca6",
+    "pk": "d4e66eaa-f4d8-4d8a-8aeb-402f56b07572",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "slug": "import",
-        "display_name": "Import"
+        "slug": "worker",
+        "display_name": "Worker requiring a GPU"
     }
 },
 {
     "model": "process.workerversion",
-    "pk": "54a463e5-2143-4043-96d3-5988919c387d",
+    "pk": "22d50ee5-45b8-485c-a87b-55d4d80d1130",
     "fields": {
-        "worker": "05431e16-035c-4ef9-ba1e-98bd55e94be3",
-        "revision": "31cfe573-224a-40ab-baab-dcdac7723aae",
-        "configuration": {},
+        "worker": "35d0c448-307a-4b58-b2f1-d4d954281db6",
+        "revision": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
+        "configuration": {
+            "test": 42
+        },
         "state": "available",
-        "gpu_usage": "disabled",
+        "gpu_usage": "required",
         "model_usage": false,
-        "docker_image": "064fed90-4f2b-400a-884e-06c280f41228",
+        "docker_image": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
         "docker_image_iid": null
     }
 },
 {
     "model": "process.workerversion",
-    "pk": "6f6a2ca0-cf2e-4092-bb55-815cf210c5d5",
+    "pk": "4fe2d7f3-66fb-4683-a4fc-7c7500b47d3f",
     "fields": {
-        "worker": "682ef63b-e42b-4049-aee3-0a8671bc5562",
-        "revision": "31cfe573-224a-40ab-baab-dcdac7723aae",
+        "worker": "a3d5a9f9-cd37-4de3-8029-040d1ad120fc",
+        "revision": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
         "configuration": {
             "test": 42
         },
         "state": "available",
-        "gpu_usage": "required",
+        "gpu_usage": "disabled",
         "model_usage": false,
-        "docker_image": "064fed90-4f2b-400a-884e-06c280f41228",
+        "docker_image": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
         "docker_image_iid": null
     }
 },
 {
     "model": "process.workerversion",
-    "pk": "8de39620-ba76-4ef7-84f4-d4fde93d1b49",
+    "pk": "d7bc21b9-5740-454e-b014-0d19ec40d537",
     "fields": {
-        "worker": "59e36945-eca1-4686-a9e6-f279e11ba2f3",
-        "revision": "31cfe573-224a-40ab-baab-dcdac7723aae",
-        "configuration": {
-            "test": 42
-        },
+        "worker": "595930b6-8802-4337-b8b0-89f2b34421bd",
+        "revision": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
+        "configuration": {},
         "state": "available",
         "gpu_usage": "disabled",
-        "model_usage": true,
-        "docker_image": "064fed90-4f2b-400a-884e-06c280f41228",
+        "model_usage": false,
+        "docker_image": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
         "docker_image_iid": null
     }
 },
 {
     "model": "process.workerversion",
-    "pk": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+    "pk": "dc35837e-3bf2-47ee-a9d8-6ba72b3db15f",
     "fields": {
-        "worker": "fa9d5344-b669-464f-89ab-c9313d5d3778",
-        "revision": "31cfe573-224a-40ab-baab-dcdac7723aae",
+        "worker": "c2b5ac86-4d83-4594-96df-f3acf2a2be6e",
+        "revision": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
         "configuration": {
             "test": 42
         },
         "state": "available",
         "gpu_usage": "disabled",
-        "model_usage": false,
-        "docker_image": "064fed90-4f2b-400a-884e-06c280f41228",
+        "model_usage": true,
+        "docker_image": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
         "docker_image_iid": null
     }
 },
 {
     "model": "process.workerversion",
-    "pk": "c3785411-88ca-4aea-9906-9c052434bda7",
+    "pk": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
     "fields": {
-        "worker": "6ae414e7-dec6-44c7-92cb-63e2d1e8c73a",
-        "revision": "31cfe573-224a-40ab-baab-dcdac7723aae",
+        "worker": "d0fdbd79-5e5f-441a-821f-1a0d69e601eb",
+        "revision": "3a770de7-f830-4ea7-b371-a3dc355fd6ef",
         "configuration": {
             "test": 42
         },
         "state": "available",
         "gpu_usage": "disabled",
         "model_usage": false,
-        "docker_image": "064fed90-4f2b-400a-884e-06c280f41228",
+        "docker_image": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
         "docker_image_iid": null
     }
 },
 {
     "model": "process.workerrun",
-    "pk": "2a526964-de1f-4ebc-af6c-ffdf36cdb8af",
+    "pk": "5bed9cd9-9083-48f3-aec5-fb0b2c466c05",
     "fields": {
-        "process": "70c59b0c-4172-4516-bd2a-615cd8b1bbba",
-        "version": "c3785411-88ca-4aea-9906-9c052434bda7",
+        "process": "aa1cc3fd-bdb2-45ce-84a8-b5ef0fb80dc0",
+        "version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "model_version": null,
         "parents": "[]",
         "configuration": null,
-        "summary": "Worker Document layout analyser @ c37854"
+        "summary": "Worker Recognizer @ fa43e3"
     }
 },
 {
     "model": "process.workerrun",
-    "pk": "2fe867f7-c01b-4061-acf7-9c3f0d7f0592",
+    "pk": "e7191a23-2073-4d36-8f65-d6bbc6d49716",
     "fields": {
-        "process": "238374b2-2923-4681-8a28-815243c0d7a7",
-        "version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "process": "2faf86ae-d27d-4b1a-b790-1d2067656c71",
+        "version": "4fe2d7f3-66fb-4683-a4fc-7c7500b47d3f",
         "model_version": null,
         "parents": "[]",
         "configuration": null,
-        "summary": "Worker Recognizer @ 9c2e08"
+        "summary": "Worker Document layout analyser @ 4fe2d7"
     }
 },
 {
     "model": "process.workerrun",
-    "pk": "395d9cb8-2436-4046-bdb3-f5758cf53998",
+    "pk": "caa14702-381f-40a5-897d-ae530ec21b42",
     "fields": {
-        "process": "70c59b0c-4172-4516-bd2a-615cd8b1bbba",
-        "version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "process": "2faf86ae-d27d-4b1a-b790-1d2067656c71",
+        "version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "model_version": null,
-        "parents": "[\"2a526964-de1f-4ebc-af6c-ffdf36cdb8af\"]",
+        "parents": "[\"e7191a23-2073-4d36-8f65-d6bbc6d49716\"]",
         "configuration": null,
-        "summary": "Worker Recognizer @ 9c2e08"
+        "summary": "Worker Recognizer @ fa43e3"
     }
 },
 {
     "model": "process.workerrun",
-    "pk": "d79a9a02-f21a-4f26-b501-ec53407c1776",
+    "pk": "f470f66d-6831-47ae-b311-95f3ad042933",
     "fields": {
-        "process": "72423407-07b4-4736-9a68-af20a4d3e6b2",
-        "version": "c3785411-88ca-4aea-9906-9c052434bda7",
+        "process": "258f38c2-7903-4a43-b7f9-4433dba57c80",
+        "version": "4fe2d7f3-66fb-4683-a4fc-7c7500b47d3f",
         "model_version": null,
         "parents": "[]",
         "configuration": null,
-        "summary": "Worker Document layout analyser @ c37854"
+        "summary": "Worker Document layout analyser @ 4fe2d7"
     }
 },
 {
     "model": "documents.corpus",
-    "pk": "eec95f5e-970a-4df3-8334-d8725a87065c",
+    "pk": "21169dab-89f7-4865-af79-c9afb6b31a0c",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
@@ -370,64 +409,64 @@
 },
 {
     "model": "documents.elementtype",
-    "pk": "0d1c9513-e155-4501-be0e-b77287595cb7",
+    "pk": "570a2662-014c-462b-8eff-33d86f3695d0",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "slug": "text_line",
-        "display_name": "Line",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "slug": "word",
+        "display_name": "Word",
         "folder": false,
         "indexable": false
     }
 },
 {
     "model": "documents.elementtype",
-    "pk": "238ad0b1-abe7-42f3-a506-f3378dd55580",
+    "pk": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "slug": "word",
-        "display_name": "Word",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "slug": "surface",
+        "display_name": "Surface",
         "folder": false,
         "indexable": false
     }
 },
 {
     "model": "documents.elementtype",
-    "pk": "615c7b49-0b94-41f6-9605-8270f3851c65",
+    "pk": "de898e3f-8967-4efd-bdd2-117203182331",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "slug": "act",
-        "display_name": "Act",
-        "folder": false,
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "slug": "volume",
+        "display_name": "Volume",
+        "folder": true,
         "indexable": false
     }
 },
 {
     "model": "documents.elementtype",
-    "pk": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
+    "pk": "e27340c9-73ed-410e-92aa-bb868399f56a",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "slug": "surface",
-        "display_name": "Surface",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "slug": "act",
+        "display_name": "Act",
         "folder": false,
         "indexable": false
     }
 },
 {
     "model": "documents.elementtype",
-    "pk": "94c5d8e6-fcce-46b4-93fd-d96503b9b22e",
+    "pk": "e3dbb7b9-2b74-4b59-9385-4c659cdf32f3",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "slug": "volume",
-        "display_name": "Volume",
-        "folder": true,
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "slug": "text_line",
+        "display_name": "Line",
+        "folder": false,
         "indexable": false
     }
 },
 {
     "model": "documents.elementtype",
-    "pk": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
+    "pk": "eab8395b-abda-40c3-af74-ad7456803445",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
         "slug": "page",
         "display_name": "Page",
         "folder": false,
@@ -436,279 +475,279 @@
 },
 {
     "model": "documents.elementpath",
-    "pk": "0a71560e-8410-4179-b9b6-0aad013f8c35",
+    "pk": "0562a356-b118-466c-9565-d8c5575b796f",
     "fields": {
-        "element": "32fcd77c-74de-47f7-ac85-cefc592b3589",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 6
+        "element": "c4d8ebc2-c85b-4f9f-b077-0a9983ac7713",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 5
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "0dba0f7b-5250-4c61-9a5b-4a81dfddb90f",
+    "pk": "15ffc8fc-d940-463f-affd-fed4a882312c",
     "fields": {
-        "element": "dd43bc89-46d8-4382-8cb4-b37ba2502efe",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 3
+        "element": "ce6bd019-f257-45ff-9870-f3e4c0b7c257",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"f67f1916-fdba-4071-aca0-3d0eb6af88eb\"]",
+        "ordering": 2
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "1af30c97-acf0-48cf-a05a-c237b8d3b1d0",
+    "pk": "169b6b93-3b3d-4046-8062-fa068d619cd1",
     "fields": {
-        "element": "f0cbb530-ae40-49c9-94c1-4112db294218",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 5
+        "element": "c247dccc-24f7-45d8-af2d-21d995d96a06",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "1d96b5ff-8cd6-4ed0-adf4-c8fbf1694c1a",
+    "pk": "286c43a0-7296-4a76-abeb-88dc417ba197",
     "fields": {
-        "element": "86caa547-d859-4587-b73b-0f3bd328e302",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"468bcf72-d74e-4e0c-a127-936431ba5d0c\"]",
-        "ordering": 0
+        "element": "e5edd024-8f45-4f4f-8536-8d34f814f0f9",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"c247dccc-24f7-45d8-af2d-21d995d96a06\"]",
+        "ordering": 2
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "2c060274-1efc-4012-b39d-9aa0d952cb62",
+    "pk": "2d3654df-122b-4282-8660-00e1b56e4488",
     "fields": {
-        "element": "a11d7c51-9292-493e-92cb-f96a4f74c21f",
-        "path": "[\"b54df16f-de69-4557-a991-35fafff03cca\"]",
-        "ordering": 1
+        "element": "992031d3-55ce-43a3-9b1e-c7983164e997",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"ae16d416-d565-48ae-a666-576aa1b6d11e\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "31253046-a5ac-41f8-92ec-ee4d88fcf0c7",
+    "pk": "3638c3b4-9a17-4ba9-8817-8fa2e040047d",
     "fields": {
-        "element": "a0f87aee-596c-404c-bdce-e054eae40ed2",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"1300f52d-1be6-48ed-b2b0-58e11173bfff\"]",
+        "element": "e617bb28-7e57-47b2-9f69-49c3db60b0e7",
+        "path": "[\"a2777875-d38d-4b58-bb61-011b93ad9d03\"]",
         "ordering": 2
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "39bc3405-5841-493b-bb62-ae0481f0191e",
+    "pk": "39c5ea10-b323-415a-9f52-4ea5db4888c9",
     "fields": {
-        "element": "01ef9dd2-358b-4dfa-bca9-9052991e3896",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"468bcf72-d74e-4e0c-a127-936431ba5d0c\"]",
-        "ordering": 1
+        "element": "cec5365b-d23d-44f2-ab93-6f1c496b92b5",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 4
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "46597823-9ee7-45d1-8f55-fb7342064ad6",
+    "pk": "3babedc3-b3fa-461c-ad03-4f469edf3e63",
     "fields": {
-        "element": "bdb16202-9d53-4e00-b28b-2f685f9211b7",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"9b57bb2f-e849-4a58-9f08-a35af64574ca\"]",
-        "ordering": 0
+        "element": "eceecbd4-588d-4a80-83f2-dc350eb8ea8c",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"f67f1916-fdba-4071-aca0-3d0eb6af88eb\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "552b0e00-ca53-4183-88ab-332ca50d507d",
+    "pk": "3e341b13-5c10-4428-8507-f720af70dbb4",
     "fields": {
-        "element": "82018759-4bb9-448e-9407-38d76395e854",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"f0cbb530-ae40-49c9-94c1-4112db294218\"]",
+        "element": "9e1e666c-688e-4660-9972-3f5c00d0d8e8",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"5981fd36-3ac7-41ea-9221-7f431d6492e2\"]",
         "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "558a7024-e8f3-48d0-9db3-8fe2dbb05e61",
+    "pk": "4ca114c6-9467-4d99-9093-754a40a69947",
     "fields": {
-        "element": "a4cab557-243d-4943-bc42-d4035933dc8e",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"32fcd77c-74de-47f7-ac85-cefc592b3589\"]",
-        "ordering": 0
+        "element": "1c15c222-bc0b-410e-a721-efd8aef45d5a",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"ae16d416-d565-48ae-a666-576aa1b6d11e\"]",
+        "ordering": 2
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "5f8a816f-673c-411c-8f59-d283aea63473",
+    "pk": "4ea14104-5827-4826-853f-5d4e8d91f81b",
     "fields": {
-        "element": "de4f6c4a-ea3c-4733-a92b-f6896abce000",
-        "path": "[]",
-        "ordering": 0
+        "element": "3aa35670-cea9-4913-a524-8c0eff63a042",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"cec5365b-d23d-44f2-ab93-6f1c496b92b5\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "616a8b29-75b2-43a1-9944-349a30edf038",
+    "pk": "5b3e05c4-43c9-4bf6-a16f-75e451e29eb5",
     "fields": {
-        "element": "468bcf72-d74e-4e0c-a127-936431ba5d0c",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 4
+        "element": "1475dc86-17fa-4041-834e-22ad2b5f9fcc",
+        "path": "[\"a2777875-d38d-4b58-bb61-011b93ad9d03\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "63b9101d-aa3b-420a-8818-42689837214a",
+    "pk": "648838a9-3b40-432a-807b-d79bf18f6464",
     "fields": {
-        "element": "149466d4-3fdd-40a1-8dc5-6536b703abae",
-        "path": "[\"b54df16f-de69-4557-a991-35fafff03cca\"]",
-        "ordering": 0
+        "element": "a315f9f0-5fa0-4fd5-8932-e775dc61e0f6",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"ae16d416-d565-48ae-a666-576aa1b6d11e\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "7f044f2f-8239-4b1f-9cc9-2c259909b661",
+    "pk": "69dcb3ce-0c9e-4b9a-8193-860fcc5f3203",
     "fields": {
-        "element": "d42ff052-acd0-4034-b767-ddd243d495c1",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 2
+        "element": "c2891e27-acb1-4738-87fb-c7006669cc5f",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"c247dccc-24f7-45d8-af2d-21d995d96a06\"]",
+        "ordering": 1
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "82531000-2140-41de-80a1-89b89361f9a4",
+    "pk": "6b43390b-29b4-413c-8f3c-7c37a580df0b",
     "fields": {
-        "element": "0c6d958b-961b-4925-90be-ae3fa2b041c3",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"1300f52d-1be6-48ed-b2b0-58e11173bfff\"]",
+        "element": "a97097d1-589e-460c-95ac-38b347296fc1",
+        "path": "[\"a2777875-d38d-4b58-bb61-011b93ad9d03\"]",
         "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "84eb9d17-b49d-4704-b2b4-505b6def89b6",
+    "pk": "738a3890-9c7e-4cd1-9cb0-5a2bc424bb69",
     "fields": {
-        "element": "ec5531c3-e7fd-40eb-964e-ccb88a4d64bf",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"1300f52d-1be6-48ed-b2b0-58e11173bfff\"]",
-        "ordering": 3
+        "element": "86685f29-4c1c-430d-9a02-33e2f030d8a4",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 6
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "906d9448-1994-401d-b0a3-fa93f3fdf4a9",
+    "pk": "7c312f17-84ce-4e69-b616-b7f0324302a8",
     "fields": {
-        "element": "b54df16f-de69-4557-a991-35fafff03cca",
-        "path": "[]",
-        "ordering": 0
+        "element": "5981fd36-3ac7-41ea-9221-7f431d6492e2",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 3
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "95370d5c-de9c-4d5f-a815-ff976d55d3f9",
+    "pk": "8b7f12ed-b908-4cbf-90d7-e9b90ecc62a2",
     "fields": {
-        "element": "bbe255a7-420a-4d52-b909-5f4296bb32ba",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"9b57bb2f-e849-4a58-9f08-a35af64574ca\"]",
-        "ordering": 1
+        "element": "9e2cfdf7-efb2-4825-83d9-283e8c11cdf1",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"86685f29-4c1c-430d-9a02-33e2f030d8a4\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "9d5eadfd-54ef-45c4-a84b-d0e4dd702878",
+    "pk": "8fc341ce-49e5-45cf-8172-bed132911af2",
     "fields": {
-        "element": "1c02819e-12a4-4bce-a2e1-32d81c17d215",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"924a1ca4-2cab-4353-a845-85fbd6b93a21\"]",
-        "ordering": 0
+        "element": "6721bf88-f4a0-4b2e-b601-88fd2128aea6",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"f67f1916-fdba-4071-aca0-3d0eb6af88eb\"]",
+        "ordering": 3
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "b26996ae-cdbb-4c09-b694-d12e7272376e",
+    "pk": "9b791c62-193e-438b-8fe1-d0bf360c2090",
     "fields": {
-        "element": "d0345827-20f8-4c1d-91e8-2d4a81a44da4",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"dd43bc89-46d8-4382-8cb4-b37ba2502efe\"]",
-        "ordering": 0
+        "element": "028caaee-635f-4098-b63a-4af444c1e4a6",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 7
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "b8df8073-af9e-48b3-af3a-1d3ffd65514a",
+    "pk": "a8f10b69-98aa-4198-9366-d6d97f8fff0b",
     "fields": {
-        "element": "1300f52d-1be6-48ed-b2b0-58e11173bfff",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
+        "element": "50a2c8c8-1616-4b44-9d66-75fcb9522d2d",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"c247dccc-24f7-45d8-af2d-21d995d96a06\"]",
         "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "bd7137b5-789f-4289-98a1-f60b2929d293",
+    "pk": "b3fcecb7-b7a8-4723-87cb-68f3f51d2912",
     "fields": {
-        "element": "cade6529-dd83-473e-bbb2-c124d5135483",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"d42ff052-acd0-4034-b767-ddd243d495c1\"]",
+        "element": "d666b066-3368-499c-8c10-cfcb48ef1b87",
+        "path": "[]",
         "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "d399c276-cd72-4890-8fbc-34b9e8d3d616",
+    "pk": "c81162c8-1437-4b98-a221-46d09164fed3",
     "fields": {
-        "element": "8abafa5f-73b4-4a1e-9a86-0cc67175a99c",
-        "path": "[\"b54df16f-de69-4557-a991-35fafff03cca\"]",
+        "element": "ae16d416-d565-48ae-a666-576aa1b6d11e",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
         "ordering": 2
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "d5ca5107-de3b-44f4-9197-813ef15e94bb",
+    "pk": "ce632009-02c0-4666-b814-77e3d1239cad",
     "fields": {
-        "element": "ee3b7dd1-1723-4a7c-9415-db2b5c07e1f4",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"d42ff052-acd0-4034-b767-ddd243d495c1\"]",
-        "ordering": 2
+        "element": "e3ec7b0a-ecfb-4df5-b046-5ad1f927ec3b",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"c4d8ebc2-c85b-4f9f-b077-0a9983ac7713\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "e339f466-e138-40f3-acea-9d1688e9d262",
+    "pk": "d0a48eea-1306-4bc4-b5b0-da0ee5e216d9",
     "fields": {
-        "element": "7dc26664-f7fc-4d6f-b46c-c1e502fbd403",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"9b57bb2f-e849-4a58-9f08-a35af64574ca\"]",
-        "ordering": 2
+        "element": "da747b7d-3140-4010-b237-57381dd0a90f",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"f67f1916-fdba-4071-aca0-3d0eb6af88eb\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "eec28c4f-5ac8-4cbe-878d-271ad4e62d3e",
+    "pk": "e9b13e65-88f6-4ab7-b232-5d6b8c1dd9bf",
     "fields": {
-        "element": "9b57bb2f-e849-4a58-9f08-a35af64574ca",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 1
+        "element": "a2777875-d38d-4b58-bb61-011b93ad9d03",
+        "path": "[]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "fb687645-0c6b-420f-afc9-07127103ea86",
+    "pk": "edcb4aa4-bfdb-4eee-8b75-0adb1c7b2ea2",
     "fields": {
-        "element": "73d6a952-aba3-49e7-95fa-531329be182d",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"d42ff052-acd0-4034-b767-ddd243d495c1\"]",
-        "ordering": 1
+        "element": "f67f1916-fdba-4071-aca0-3d0eb6af88eb",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "fd43f454-8513-4626-932e-c1577174a89a",
+    "pk": "fbcf474b-0f14-4dcc-8ae4-16eaa4e33977",
     "fields": {
-        "element": "924a1ca4-2cab-4353-a845-85fbd6b93a21",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\"]",
-        "ordering": 7
+        "element": "d063146f-557c-4456-8e52-952218b3831a",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"028caaee-635f-4098-b63a-4af444c1e4a6\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.elementpath",
-    "pk": "ff7e3c30-d8e8-4bef-95e4-791cf3abe592",
+    "pk": "ff80f0cb-8fc1-4320-967a-400f11918b83",
     "fields": {
-        "element": "bbd05b78-a950-4974-ad12-4e7cf0cd8c08",
-        "path": "[\"de4f6c4a-ea3c-4733-a92b-f6896abce000\", \"1300f52d-1be6-48ed-b2b0-58e11173bfff\"]",
-        "ordering": 1
+        "element": "c976284f-b54e-49c0-a550-9ddfb116ed98",
+        "path": "[\"d666b066-3368-499c-8c10-cfcb48ef1b87\", \"cec5365b-d23d-44f2-ab93-6f1c496b92b5\"]",
+        "ordering": 0
     }
 },
 {
     "model": "documents.element",
-    "pk": "01ef9dd2-358b-4dfa-bca9-9052991e3896",
+    "pk": "028caaee-635f-4098-b63a-4af444c1e4a6",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface C",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e27340c9-73ed-410e-92aa-bb868399f56a",
+        "name": "Act 5",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
-        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
+        "image": null,
+        "polygon": null,
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -716,18 +755,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "0c6d958b-961b-4925-90be-ae3fa2b041c3",
+    "pk": "1475dc86-17fa-4041-834e-22ad2b5f9fcc",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "PARIS",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 2, page 1v",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
+        "image": "e6cb1e0e-f8ee-405c-9c3e-3085c22aab98",
+        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -735,18 +774,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "1300f52d-1be6-48ed-b2b0-58e11173bfff",
+    "pk": "1c15c222-bc0b-410e-a721-efd8aef45d5a",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 1, page 1r",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "DATUM",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -754,17 +793,17 @@
 },
 {
     "model": "documents.element",
-    "pk": "149466d4-3fdd-40a1-8dc5-6536b703abae",
+    "pk": "3aa35670-cea9-4913-a524-8c0eff63a042",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 2, page 1r",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface C",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "1589a5b4-9bed-4cf2-a415-bf0ed62b824d",
+        "image": "a807b209-c11d-431d-846f-5922fc7a9a44",
         "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
@@ -773,18 +812,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "1c02819e-12a4-4bce-a2e1-32d81c17d215",
+    "pk": "50a2c8c8-1616-4b44-9d66-75fcb9522d2d",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface F",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "PARIS",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (600 600, 600 1000, 1000 1000, 1000 600, 600 600)",
+        "image": "a807b209-c11d-431d-846f-5922fc7a9a44",
+        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -792,13 +831,13 @@
 },
 {
     "model": "documents.element",
-    "pk": "32fcd77c-74de-47f7-ac85-cefc592b3589",
+    "pk": "5981fd36-3ac7-41ea-9221-7f431d6492e2",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "615c7b49-0b94-41f6-9605-8270f3851c65",
-        "name": "Act 4",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e27340c9-73ed-410e-92aa-bb868399f56a",
+        "name": "Act 1",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
@@ -811,18 +850,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "468bcf72-d74e-4e0c-a127-936431ba5d0c",
+    "pk": "6721bf88-f4a0-4b2e-b601-88fd2128aea6",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "615c7b49-0b94-41f6-9605-8270f3851c65",
-        "name": "Act 2",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e3dbb7b9-2b74-4b59-9385-4c659cdf32f3",
+        "name": "Text line",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": null,
-        "polygon": null,
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -830,18 +869,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "73d6a952-aba3-49e7-95fa-531329be182d",
+    "pk": "86685f29-4c1c-430d-9a02-33e2f030d8a4",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "ROY",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e27340c9-73ed-410e-92aa-bb868399f56a",
+        "name": "Act 4",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
+        "image": null,
+        "polygon": null,
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -849,18 +888,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "7dc26664-f7fc-4d6f-b46c-c1e502fbd403",
+    "pk": "992031d3-55ce-43a3-9b1e-c7983164e997",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "DATUM",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "PARIS",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
-        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -868,18 +907,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "82018759-4bb9-448e-9407-38d76395e854",
+    "pk": "9e1e666c-688e-4660-9972-3f5c00d0d8e8",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface D",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface A",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (0 0, 0 300, 300 300, 300 0, 0 0)",
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (0 0, 0 600, 600 600, 600 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -887,18 +926,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "86caa547-d859-4587-b73b-0f3bd328e302",
+    "pk": "9e2cfdf7-efb2-4825-83d9-283e8c11cdf1",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface B",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface E",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (600 600, 600 1000, 1000 1000, 1000 600, 600 600)",
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (300 300, 300 600, 600 600, 600 300, 300 300)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -906,18 +945,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "8abafa5f-73b4-4a1e-9a86-0cc67175a99c",
+    "pk": "a2777875-d38d-4b58-bb61-011b93ad9d03",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 2, page 2r",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "de898e3f-8967-4efd-bdd2-117203182331",
+        "name": "Volume 2",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "1ae616c9-b268-4767-a4ff-f52b6ff11b1d",
-        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
+        "image": null,
+        "polygon": null,
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -925,18 +964,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "924a1ca4-2cab-4353-a845-85fbd6b93a21",
+    "pk": "a315f9f0-5fa0-4fd5-8932-e775dc61e0f6",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "615c7b49-0b94-41f6-9605-8270f3851c65",
-        "name": "Act 5",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "ROY",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": null,
-        "polygon": null,
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -944,17 +983,17 @@
 },
 {
     "model": "documents.element",
-    "pk": "9b57bb2f-e849-4a58-9f08-a35af64574ca",
+    "pk": "a97097d1-589e-460c-95ac-38b347296fc1",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 1, page 1v",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 2, page 1r",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
+        "image": "c51ad170-9f7f-433f-bce9-14c381affdc6",
         "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
@@ -963,18 +1002,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "a0f87aee-596c-404c-bdce-e054eae40ed2",
+    "pk": "ae16d416-d565-48ae-a666-576aa1b6d11e",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "DATUM",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 1, page 2r",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -982,17 +1021,17 @@
 },
 {
     "model": "documents.element",
-    "pk": "a11d7c51-9292-493e-92cb-f96a4f74c21f",
+    "pk": "c247dccc-24f7-45d8-af2d-21d995d96a06",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 2, page 1v",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 1, page 1v",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4a3eb08c-9d54-4166-b8f7-23cbe5a7eb22",
+        "image": "a807b209-c11d-431d-846f-5922fc7a9a44",
         "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
@@ -1001,18 +1040,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "a4cab557-243d-4943-bc42-d4035933dc8e",
+    "pk": "c2891e27-acb1-4738-87fb-c7006669cc5f",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface E",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "ROY",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (300 300, 300 600, 600 600, 600 300, 300 300)",
+        "image": "a807b209-c11d-431d-846f-5922fc7a9a44",
+        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1020,13 +1059,13 @@
 },
 {
     "model": "documents.element",
-    "pk": "b54df16f-de69-4557-a991-35fafff03cca",
+    "pk": "c4d8ebc2-c85b-4f9f-b077-0a9983ac7713",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "94c5d8e6-fcce-46b4-93fd-d96503b9b22e",
-        "name": "Volume 2",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e27340c9-73ed-410e-92aa-bb868399f56a",
+        "name": "Act 3",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
@@ -1039,18 +1078,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "bbd05b78-a950-4974-ad12-4e7cf0cd8c08",
+    "pk": "c976284f-b54e-49c0-a550-9ddfb116ed98",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "ROY",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface B",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (600 600, 600 1000, 1000 1000, 1000 600, 600 600)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1058,18 +1097,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "bbe255a7-420a-4d52-b909-5f4296bb32ba",
+    "pk": "ce6bd019-f257-45ff-9870-f3e4c0b7c257",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "ROY",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "DATUM",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
-        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1077,18 +1116,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "bdb16202-9d53-4e00-b28b-2f685f9211b7",
+    "pk": "cec5365b-d23d-44f2-ab93-6f1c496b92b5",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "PARIS",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "e27340c9-73ed-410e-92aa-bb868399f56a",
+        "name": "Act 2",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
-        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
+        "image": null,
+        "polygon": null,
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1096,18 +1135,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "cade6529-dd83-473e-bbb2-c124d5135483",
+    "pk": "d063146f-557c-4456-8e52-952218b3831a",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "PARIS",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface F",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (600 600, 600 1000, 1000 1000, 1000 600, 600 600)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1115,18 +1154,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "d0345827-20f8-4c1d-91e8-2d4a81a44da4",
+    "pk": "d666b066-3368-499c-8c10-cfcb48ef1b87",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "768928ba-70ea-4640-9ee4-71bcad33dcbb",
-        "name": "Surface A",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "de898e3f-8967-4efd-bdd2-117203182331",
+        "name": "Volume 1",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (0 0, 0 600, 600 600, 600 0, 0 0)",
+        "image": null,
+        "polygon": null,
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1134,18 +1173,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "d42ff052-acd0-4034-b767-ddd243d495c1",
+    "pk": "da747b7d-3140-4010-b237-57381dd0a90f",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "a00ab5a6-5f42-48c2-af02-2cfac0732743",
-        "name": "Volume 1, page 2r",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "PARIS",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (100 100, 100 200, 200 200, 200 100, 100 100)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1153,18 +1192,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "dd43bc89-46d8-4382-8cb4-b37ba2502efe",
+    "pk": "e3ec7b0a-ecfb-4df5-b046-5ad1f927ec3b",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "615c7b49-0b94-41f6-9605-8270f3851c65",
-        "name": "Act 1",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "ab5f418f-79b8-4d0a-94f4-34080ade0083",
+        "name": "Surface D",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": null,
-        "polygon": null,
+        "image": "cd977b64-409b-4d5e-96dc-2037057a7987",
+        "polygon": "LINEARRING (0 0, 0 300, 300 300, 300 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1172,18 +1211,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "de4f6c4a-ea3c-4733-a92b-f6896abce000",
+    "pk": "e5edd024-8f45-4f4f-8536-8d34f814f0f9",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "94c5d8e6-fcce-46b4-93fd-d96503b9b22e",
-        "name": "Volume 1",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "DATUM",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": null,
-        "polygon": null,
+        "image": "a807b209-c11d-431d-846f-5922fc7a9a44",
+        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1191,18 +1230,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "ec5531c3-e7fd-40eb-964e-ccb88a4d64bf",
+    "pk": "e617bb28-7e57-47b2-9f69-49c3db60b0e7",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "0d1c9513-e155-4501-be0e-b77287595cb7",
-        "name": "Text line",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 2, page 2r",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "b84876b7-0cc1-4351-88bf-72de33b9d319",
-        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
+        "image": "abfbf945-d1b7-4434-964b-3952cb55536f",
+        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1210,18 +1249,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "ee3b7dd1-1723-4a7c-9415-db2b5c07e1f4",
+    "pk": "eceecbd4-588d-4a80-83f2-dc350eb8ea8c",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "238ad0b1-abe7-42f3-a506-f3378dd55580",
-        "name": "DATUM",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "570a2662-014c-462b-8eff-33d86f3695d0",
+        "name": "ROY",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
-        "polygon": "LINEARRING (700 700, 700 800, 800 800, 800 700, 700 700)",
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (400 400, 400 500, 500 500, 500 400, 400 400)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1229,18 +1268,18 @@
 },
 {
     "model": "documents.element",
-    "pk": "f0cbb530-ae40-49c9-94c1-4112db294218",
+    "pk": "f67f1916-fdba-4071-aca0-3d0eb6af88eb",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "615c7b49-0b94-41f6-9605-8270f3851c65",
-        "name": "Act 3",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "eab8395b-abda-40c3-af74-ad7456803445",
+        "name": "Volume 1, page 1r",
         "creator": null,
         "worker_version": null,
         "worker_run": null,
-        "image": null,
-        "polygon": null,
+        "image": "cb548ca8-942e-4eca-95d5-580991857d08",
+        "polygon": "LINEARRING (0 0, 0 1000, 1000 1000, 1000 0, 0 0)",
         "rotation_angle": 0,
         "mirrored": false,
         "confidence": null
@@ -1248,55 +1287,55 @@
 },
 {
     "model": "documents.entitytype",
-    "pk": "2269f731-9d2c-44af-847c-c12d3704a410",
+    "pk": "2b3e8860-7520-46fe-aa99-c40392c715ca",
     "fields": {
-        "name": "person",
+        "name": "location",
         "color": "ff0000",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c"
     }
 },
 {
     "model": "documents.entitytype",
-    "pk": "4056b381-0dfc-40d9-89f1-2b4cf8f526c4",
+    "pk": "683ae3fd-d36d-40bd-bd5d-dcdff45c628a",
     "fields": {
-        "name": "number",
+        "name": "person",
         "color": "ff0000",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c"
     }
 },
 {
     "model": "documents.entitytype",
-    "pk": "55e6838f-9f61-4db5-bd05-2b05d0ed131e",
+    "pk": "89f24620-32ba-45db-95bd-6692d9f8dc9a",
     "fields": {
-        "name": "date",
+        "name": "number",
         "color": "ff0000",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c"
     }
 },
 {
     "model": "documents.entitytype",
-    "pk": "8e1d549b-98f4-4e69-b7e0-156ecfb4afa3",
+    "pk": "b188d92b-7679-4702-9238-a4ba0382f543",
     "fields": {
-        "name": "location",
+        "name": "date",
         "color": "ff0000",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c"
     }
 },
 {
     "model": "documents.entitytype",
-    "pk": "b6c6c03b-d7fe-457e-9111-8fd0fb0d2edb",
+    "pk": "efd14c6b-dcc4-4800-829e-3a1a80679962",
     "fields": {
         "name": "organization",
         "color": "ff0000",
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c"
     }
 },
 {
     "model": "documents.transcription",
-    "pk": "0ebb0eed-7138-4777-a5f5-532f9308c801",
+    "pk": "1c1fce2b-5798-4437-872a-93a59f7f2845",
     "fields": {
-        "element": "ee3b7dd1-1723-4a7c-9415-db2b5c07e1f4",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "e5edd024-8f45-4f4f-8536-8d34f814f0f9",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
         "text": "DATUM",
         "orientation": "horizontal-lr",
@@ -1305,10 +1344,10 @@
 },
 {
     "model": "documents.transcription",
-    "pk": "21560b66-a110-471f-9c09-5f94ce93927b",
+    "pk": "2704e9d4-624e-43e0-a1b3-e0efe02132bc",
     "fields": {
-        "element": "cade6529-dd83-473e-bbb2-c124d5135483",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "992031d3-55ce-43a3-9b1e-c7983164e997",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
         "text": "PARIS",
         "orientation": "horizontal-lr",
@@ -1317,10 +1356,10 @@
 },
 {
     "model": "documents.transcription",
-    "pk": "4f70e5a2-24f3-4b15-89c9-50d00958d05a",
+    "pk": "2761c554-5242-46b2-80bf-b26422cdd326",
     "fields": {
-        "element": "7dc26664-f7fc-4d6f-b46c-c1e502fbd403",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "ce6bd019-f257-45ff-9870-f3e4c0b7c257",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
         "text": "DATUM",
         "orientation": "horizontal-lr",
@@ -1329,34 +1368,34 @@
 },
 {
     "model": "documents.transcription",
-    "pk": "515f151f-9884-4f8f-9a3e-30d0c17740ba",
+    "pk": "54805d3e-1e45-41c7-9b93-34c6ca8a9248",
     "fields": {
-        "element": "73d6a952-aba3-49e7-95fa-531329be182d",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "f67f1916-fdba-4071-aca0-3d0eb6af88eb",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
-        "text": "ROY",
+        "text": "Lorem ipsum dolor sit amet",
         "orientation": "horizontal-lr",
         "confidence": 1.0
     }
 },
 {
     "model": "documents.transcription",
-    "pk": "5ef00fbd-4457-4731-82a1-8fe459bd0fdc",
+    "pk": "ae20bbdd-8c9a-4c4b-bb56-39c29ebbab30",
     "fields": {
-        "element": "a0f87aee-596c-404c-bdce-e054eae40ed2",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "c2891e27-acb1-4738-87fb-c7006669cc5f",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
-        "text": "DATUM",
+        "text": "ROY",
         "orientation": "horizontal-lr",
         "confidence": 1.0
     }
 },
 {
     "model": "documents.transcription",
-    "pk": "a098cc27-c0de-43f8-b873-2df13cc0a41e",
+    "pk": "c3999e7a-f5ac-4994-a5a7-82c30ccc4a23",
     "fields": {
-        "element": "bbe255a7-420a-4d52-b909-5f4296bb32ba",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "a315f9f0-5fa0-4fd5-8932-e775dc61e0f6",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
         "text": "ROY",
         "orientation": "horizontal-lr",
@@ -1365,34 +1404,34 @@
 },
 {
     "model": "documents.transcription",
-    "pk": "a0a4617e-0ffb-435d-bdaa-7251a1699c2e",
+    "pk": "d4bb12d0-2a0a-4356-a320-31f8bba88693",
     "fields": {
-        "element": "bbd05b78-a950-4974-ad12-4e7cf0cd8c08",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "da747b7d-3140-4010-b237-57381dd0a90f",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
-        "text": "ROY",
+        "text": "PARIS",
         "orientation": "horizontal-lr",
         "confidence": 1.0
     }
 },
 {
     "model": "documents.transcription",
-    "pk": "abc97f73-c5e8-41f3-a45f-a1c7c298ddc3",
+    "pk": "dec557e0-13c3-45b4-bcc6-7de0f057d0f0",
     "fields": {
-        "element": "1300f52d-1be6-48ed-b2b0-58e11173bfff",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "eceecbd4-588d-4a80-83f2-dc350eb8ea8c",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
-        "text": "Lorem ipsum dolor sit amet",
+        "text": "ROY",
         "orientation": "horizontal-lr",
         "confidence": 1.0
     }
 },
 {
     "model": "documents.transcription",
-    "pk": "b50f3a86-44e8-4f91-ae6f-ab46ba0d5b89",
+    "pk": "f3ba7207-8dfe-41c7-b2c0-fb7e8c20c9ca",
     "fields": {
-        "element": "bdb16202-9d53-4e00-b28b-2f685f9211b7",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "50a2c8c8-1616-4b44-9d66-75fcb9522d2d",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
         "text": "PARIS",
         "orientation": "horizontal-lr",
@@ -1401,51 +1440,51 @@
 },
 {
     "model": "documents.transcription",
-    "pk": "ed64eaca-4b01-49d8-80a6-bf29c5134f36",
+    "pk": "f93dc4d0-9c8b-4c25-821b-74d487f84aa0",
     "fields": {
-        "element": "0c6d958b-961b-4925-90be-ae3fa2b041c3",
-        "worker_version": "9c2e0892-4107-46e2-8ff9-749966c62d7d",
+        "element": "1c15c222-bc0b-410e-a721-efd8aef45d5a",
+        "worker_version": "fa43e3bc-291e-4e38-b975-4c03b279f0fc",
         "worker_run": null,
-        "text": "PARIS",
+        "text": "DATUM",
         "orientation": "horizontal-lr",
         "confidence": 1.0
     }
 },
 {
     "model": "documents.allowedmetadata",
-    "pk": "01b237fa-2caf-4476-bb1e-c7e858cc537a",
+    "pk": "55129bb9-4472-4c88-9fdf-eadcd50b15a2",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "text",
-        "name": "folio"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "location",
+        "name": "location"
     }
 },
 {
     "model": "documents.allowedmetadata",
-    "pk": "5e1587a7-47ed-4ede-8d79-f7678449dfb2",
+    "pk": "5e2d63cc-b000-4929-90ff-edad931beadd",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "type": "location",
-        "name": "location"
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "type": "text",
+        "name": "folio"
     }
 },
 {
     "model": "documents.allowedmetadata",
-    "pk": "c44fe2e3-4bda-4b07-bad5-b39b111f1a2b",
+    "pk": "c3d4c687-381b-4a7b-835e-716dc6716999",
     "fields": {
-        "corpus": "eec95f5e-970a-4df3-8334-d8725a87065c",
+        "corpus": "21169dab-89f7-4865-af79-c9afb6b31a0c",
         "type": "date",
         "name": "date"
     }
 },
 {
     "model": "documents.metadata",
-    "pk": "0cccc5f1-6d05-471a-8d83-a9b613aabcc8",
+    "pk": "2668368f-165f-4d34-8dfb-53acddb0dd5f",
     "fields": {
-        "element": "924a1ca4-2cab-4353-a845-85fbd6b93a21",
+        "element": "5981fd36-3ac7-41ea-9221-7f431d6492e2",
         "name": "number",
         "type": "text",
-        "value": "5",
+        "value": "1",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1453,12 +1492,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "0e784e1f-f900-4c67-b346-b6a4506d5cc9",
+    "pk": "2f6a2c89-eb17-4b15-b9bd-831f4d6fc6c4",
     "fields": {
-        "element": "149466d4-3fdd-40a1-8dc5-6536b703abae",
-        "name": "folio",
+        "element": "c4d8ebc2-c85b-4f9f-b077-0a9983ac7713",
+        "name": "number",
         "type": "text",
-        "value": "1r",
+        "value": "3",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1466,12 +1505,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "1e7765fb-e033-4a61-b3e5-e1eb7ef68931",
+    "pk": "41ae7e0c-7664-4761-8b0c-e0796c0c6e9a",
     "fields": {
-        "element": "f0cbb530-ae40-49c9-94c1-4112db294218",
+        "element": "86685f29-4c1c-430d-9a02-33e2f030d8a4",
         "name": "number",
         "type": "text",
-        "value": "3",
+        "value": "4",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1479,12 +1518,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "2cb11bc5-a284-4759-bfa7-e228e88f5098",
+    "pk": "44e614c5-f326-4182-8aa9-8a48fce3dd45",
     "fields": {
-        "element": "32fcd77c-74de-47f7-ac85-cefc592b3589",
-        "name": "number",
+        "element": "c247dccc-24f7-45d8-af2d-21d995d96a06",
+        "name": "folio",
         "type": "text",
-        "value": "4",
+        "value": "1v",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1492,9 +1531,9 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "5405be3c-bf01-4a59-8e76-486f192495b0",
+    "pk": "4e816a42-1cd5-4aa6-9899-b319fbd4fe73",
     "fields": {
-        "element": "1300f52d-1be6-48ed-b2b0-58e11173bfff",
+        "element": "a97097d1-589e-460c-95ac-38b347296fc1",
         "name": "folio",
         "type": "text",
         "value": "1r",
@@ -1505,12 +1544,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "8316a3e2-0fc3-4c13-b31b-56890665a04d",
+    "pk": "5acc73f2-afe4-4165-a8be-dc5e7623bc5e",
     "fields": {
-        "element": "dd43bc89-46d8-4382-8cb4-b37ba2502efe",
-        "name": "number",
+        "element": "1475dc86-17fa-4041-834e-22ad2b5f9fcc",
+        "name": "folio",
         "type": "text",
-        "value": "1",
+        "value": "1v",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1518,12 +1557,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "b7b6b973-d9e8-4ec2-bb75-4f6a72411b45",
+    "pk": "7c749dbb-3a31-4a70-9649-a5d67ecf0f51",
     "fields": {
-        "element": "9b57bb2f-e849-4a58-9f08-a35af64574ca",
+        "element": "ae16d416-d565-48ae-a666-576aa1b6d11e",
         "name": "folio",
         "type": "text",
-        "value": "1v",
+        "value": "2r",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1531,12 +1570,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "b868a014-5427-4f57-9170-ac516773672a",
+    "pk": "83da4c83-0a26-4fb1-84de-ad91da78a745",
     "fields": {
-        "element": "a11d7c51-9292-493e-92cb-f96a4f74c21f",
-        "name": "folio",
+        "element": "cec5365b-d23d-44f2-ab93-6f1c496b92b5",
+        "name": "number",
         "type": "text",
-        "value": "1v",
+        "value": "2",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1544,12 +1583,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "b87b5366-554a-4b41-b407-40911aeb1e10",
+    "pk": "8b5b4af9-d0d3-446a-a227-6c28c0f7874f",
     "fields": {
-        "element": "468bcf72-d74e-4e0c-a127-936431ba5d0c",
-        "name": "number",
+        "element": "f67f1916-fdba-4071-aca0-3d0eb6af88eb",
+        "name": "folio",
         "type": "text",
-        "value": "2",
+        "value": "1r",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1557,9 +1596,9 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "d6e386e2-382f-46cd-9a40-95da6492273c",
+    "pk": "9995d84f-a047-487d-864c-8ceb91d4c2a4",
     "fields": {
-        "element": "d42ff052-acd0-4034-b767-ddd243d495c1",
+        "element": "e617bb28-7e57-47b2-9f69-49c3db60b0e7",
         "name": "folio",
         "type": "text",
         "value": "2r",
@@ -1570,12 +1609,12 @@
 },
 {
     "model": "documents.metadata",
-    "pk": "f83fec0c-4987-4c85-a036-70e7978c68f0",
+    "pk": "c075fd97-2647-4efb-a2e4-72016d7851a5",
     "fields": {
-        "element": "8abafa5f-73b4-4a1e-9a86-0cc67175a99c",
-        "name": "folio",
+        "element": "028caaee-635f-4098-b63a-4af444c1e4a6",
+        "name": "number",
         "type": "text",
-        "value": "2r",
+        "value": "5",
         "entity": null,
         "worker_version": null,
         "worker_run": null
@@ -1598,7 +1637,7 @@
 },
 {
     "model": "images.image",
-    "pk": "0bad5afd-95cf-4f4c-831e-7bdf8f40245a",
+    "pk": "a807b209-c11d-431d-846f-5922fc7a9a44",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
@@ -1612,12 +1651,12 @@
 },
 {
     "model": "images.image",
-    "pk": "1589a5b4-9bed-4cf2-a415-bf0ed62b824d",
+    "pk": "abfbf945-d1b7-4434-964b-3952cb55536f",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "server": 1,
-        "path": "img4",
+        "path": "img6",
         "width": 1000,
         "height": 1000,
         "hash": null,
@@ -1626,12 +1665,12 @@
 },
 {
     "model": "images.image",
-    "pk": "1ae616c9-b268-4767-a4ff-f52b6ff11b1d",
+    "pk": "c51ad170-9f7f-433f-bce9-14c381affdc6",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "server": 1,
-        "path": "img6",
+        "path": "img4",
         "width": 1000,
         "height": 1000,
         "hash": null,
@@ -1640,12 +1679,12 @@
 },
 {
     "model": "images.image",
-    "pk": "4a3eb08c-9d54-4166-b8f7-23cbe5a7eb22",
+    "pk": "cb548ca8-942e-4eca-95d5-580991857d08",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "server": 1,
-        "path": "img5",
+        "path": "img1",
         "width": 1000,
         "height": 1000,
         "hash": null,
@@ -1654,7 +1693,7 @@
 },
 {
     "model": "images.image",
-    "pk": "4cfe4087-a46c-4ab5-8b02-65858bc618b0",
+    "pk": "cd977b64-409b-4d5e-96dc-2037057a7987",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
@@ -1668,12 +1707,12 @@
 },
 {
     "model": "images.image",
-    "pk": "b84876b7-0cc1-4351-88bf-72de33b9d319",
+    "pk": "e6cb1e0e-f8ee-405c-9c3e-3085c22aab98",
     "fields": {
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "server": 1,
-        "path": "img1",
+        "path": "img5",
         "width": 1000,
         "height": 1000,
         "hash": null,
@@ -1682,53 +1721,53 @@
 },
 {
     "model": "users.right",
-    "pk": "165c12ae-18eb-4626-bde4-7da3b33b0212",
+    "pk": "2320cf30-9d6b-4bcf-ac9c-20f9c55d68cb",
     "fields": {
         "user": 2,
         "group": null,
-        "content_type": 36,
-        "content_id": "731f809b-39ff-4928-932d-80ef5942c2d4",
+        "content_type": 35,
+        "content_id": "dd13c8e6-3912-4bcc-9c25-e50cc088edeb",
         "level": 100
     }
 },
 {
     "model": "users.right",
-    "pk": "28179a95-58d7-47fc-b69c-e0c971ff7a7e",
+    "pk": "89e34e22-4f82-4b73-a4bb-4eb3499db58b",
     "fields": {
-        "user": 4,
+        "user": 3,
         "group": null,
-        "content_type": 36,
-        "content_id": "731f809b-39ff-4928-932d-80ef5942c2d4",
-        "level": 10
+        "content_type": 35,
+        "content_id": "dd13c8e6-3912-4bcc-9c25-e50cc088edeb",
+        "level": 50
     }
 },
 {
     "model": "users.right",
-    "pk": "335faa82-d3cf-4614-afcb-0caa57a236db",
+    "pk": "e4287250-3c97-4333-8603-1b3c21aa133f",
     "fields": {
-        "user": 3,
+        "user": 2,
         "group": null,
-        "content_type": 36,
-        "content_id": "731f809b-39ff-4928-932d-80ef5942c2d4",
-        "level": 50
+        "content_type": 20,
+        "content_id": "21169dab-89f7-4865-af79-c9afb6b31a0c",
+        "level": 100
     }
 },
 {
     "model": "users.right",
-    "pk": "f822f6d7-9705-41ad-a7a2-2c53fe2fb15a",
+    "pk": "f82236a1-993f-4086-92bc-aec803974c00",
     "fields": {
-        "user": 2,
+        "user": 4,
         "group": null,
-        "content_type": 21,
-        "content_id": "eec95f5e-970a-4df3-8334-d8725a87065c",
-        "level": 100
+        "content_type": 35,
+        "content_id": "dd13c8e6-3912-4bcc-9c25-e50cc088edeb",
+        "level": 10
     }
 },
 {
     "model": "users.user",
     "pk": 1,
     "fields": {
-        "password": "pbkdf2_sha256$390000$Q3X6XS0VMS6Xfsrbpkc32R$EPhq5DZkkWvqs1Mz9BijkqV16H0zc4PJylHOiK7B22w=",
+        "password": "pbkdf2_sha256$390000$BWmGg7BTcdKFSxfnXocHOC$WirE10nSGLqFNi50I5kikFxCrLfW6HZDReMSpixJvnk=",
         "last_login": null,
         "email": "root@root.fr",
         "display_name": "Admin",
@@ -1744,7 +1783,7 @@
     "model": "users.user",
     "pk": 2,
     "fields": {
-        "password": "pbkdf2_sha256$390000$Nfh2586O2GSXcyiPUvvORz$sGKWfIhJ9tSeId5xeKkhJQ1Z7C410mKrf9FvVcVa3u8=",
+        "password": "pbkdf2_sha256$390000$ppjvgq9UVEus67altMa2de$DL3xcleLiycxov3XuQ28XmWjCZVZEnrM+FaraJK1zd8=",
         "last_login": null,
         "email": "user@user.fr",
         "display_name": "Test user",
@@ -1790,7 +1829,7 @@
 },
 {
     "model": "users.group",
-    "pk": "731f809b-39ff-4928-932d-80ef5942c2d4",
+    "pk": "dd13c8e6-3912-4bcc-9c25-e50cc088edeb",
     "fields": {
         "name": "User group",
         "public": false,
@@ -1799,7 +1838,7 @@
 },
 {
     "model": "users.oauthcredentials",
-    "pk": "0847cf6f-d9d0-4044-9760-8139e2c3a91b",
+    "pk": "1332d2da-50bc-49ab-9084-e75327d40fa1",
     "fields": {
         "user": 2,
         "provider_url": "https://somewhere",
@@ -2255,1643 +2294,1605 @@
     "model": "auth.permission",
     "pk": 50,
     "fields": {
-        "name": "Can add workflow",
+        "name": "Can add task",
         "content_type": 15,
-        "codename": "add_workflow"
+        "codename": "add_task"
     }
 },
 {
     "model": "auth.permission",
     "pk": 51,
     "fields": {
-        "name": "Can change workflow",
+        "name": "Can change task",
         "content_type": 15,
-        "codename": "change_workflow"
+        "codename": "change_task"
     }
 },
 {
     "model": "auth.permission",
     "pk": 52,
     "fields": {
-        "name": "Can delete workflow",
+        "name": "Can delete task",
         "content_type": 15,
-        "codename": "delete_workflow"
+        "codename": "delete_task"
     }
 },
 {
     "model": "auth.permission",
     "pk": 53,
     "fields": {
-        "name": "Can view workflow",
+        "name": "Can view task",
         "content_type": 15,
-        "codename": "view_workflow"
+        "codename": "view_task"
     }
 },
 {
     "model": "auth.permission",
     "pk": 54,
     "fields": {
-        "name": "Can add task",
-        "content_type": 16,
-        "codename": "add_task"
+        "name": "Can add agent user",
+        "content_type": 2,
+        "codename": "add_agentuser"
     }
 },
 {
     "model": "auth.permission",
     "pk": 55,
     "fields": {
-        "name": "Can change task",
-        "content_type": 16,
-        "codename": "change_task"
+        "name": "Can change agent user",
+        "content_type": 2,
+        "codename": "change_agentuser"
     }
 },
 {
     "model": "auth.permission",
     "pk": 56,
     "fields": {
-        "name": "Can delete task",
-        "content_type": 16,
-        "codename": "delete_task"
+        "name": "Can delete agent user",
+        "content_type": 2,
+        "codename": "delete_agentuser"
     }
 },
 {
     "model": "auth.permission",
     "pk": 57,
     "fields": {
-        "name": "Can view task",
-        "content_type": 16,
-        "codename": "view_task"
+        "name": "Can view agent user",
+        "content_type": 2,
+        "codename": "view_agentuser"
     }
 },
 {
     "model": "auth.permission",
     "pk": 58,
     "fields": {
-        "name": "Can add agent user",
-        "content_type": 2,
-        "codename": "add_agentuser"
+        "name": "Can add image",
+        "content_type": 16,
+        "codename": "add_image"
     }
 },
 {
     "model": "auth.permission",
     "pk": 59,
     "fields": {
-        "name": "Can change agent user",
-        "content_type": 2,
-        "codename": "change_agentuser"
+        "name": "Can change image",
+        "content_type": 16,
+        "codename": "change_image"
     }
 },
 {
     "model": "auth.permission",
     "pk": 60,
     "fields": {
-        "name": "Can delete agent user",
-        "content_type": 2,
-        "codename": "delete_agentuser"
+        "name": "Can delete image",
+        "content_type": 16,
+        "codename": "delete_image"
     }
 },
 {
     "model": "auth.permission",
     "pk": 61,
     "fields": {
-        "name": "Can view agent user",
-        "content_type": 2,
-        "codename": "view_agentuser"
+        "name": "Can view image",
+        "content_type": 16,
+        "codename": "view_image"
     }
 },
 {
     "model": "auth.permission",
     "pk": 62,
     "fields": {
-        "name": "Can add image",
+        "name": "Can add image server",
         "content_type": 17,
-        "codename": "add_image"
+        "codename": "add_imageserver"
     }
 },
 {
     "model": "auth.permission",
     "pk": 63,
     "fields": {
-        "name": "Can change image",
+        "name": "Can change image server",
         "content_type": 17,
-        "codename": "change_image"
+        "codename": "change_imageserver"
     }
 },
 {
     "model": "auth.permission",
     "pk": 64,
     "fields": {
-        "name": "Can delete image",
+        "name": "Can delete image server",
         "content_type": 17,
-        "codename": "delete_image"
+        "codename": "delete_imageserver"
     }
 },
 {
     "model": "auth.permission",
     "pk": 65,
     "fields": {
-        "name": "Can view image",
+        "name": "Can view image server",
         "content_type": 17,
-        "codename": "view_image"
+        "codename": "view_imageserver"
     }
 },
 {
     "model": "auth.permission",
     "pk": 66,
     "fields": {
-        "name": "Can add image server",
+        "name": "Can add allowed meta data",
         "content_type": 18,
-        "codename": "add_imageserver"
+        "codename": "add_allowedmetadata"
     }
 },
 {
     "model": "auth.permission",
     "pk": 67,
     "fields": {
-        "name": "Can change image server",
+        "name": "Can change allowed meta data",
         "content_type": 18,
-        "codename": "change_imageserver"
+        "codename": "change_allowedmetadata"
     }
 },
 {
     "model": "auth.permission",
     "pk": 68,
     "fields": {
-        "name": "Can delete image server",
+        "name": "Can delete allowed meta data",
         "content_type": 18,
-        "codename": "delete_imageserver"
+        "codename": "delete_allowedmetadata"
     }
 },
 {
     "model": "auth.permission",
     "pk": 69,
     "fields": {
-        "name": "Can view image server",
+        "name": "Can view allowed meta data",
         "content_type": 18,
-        "codename": "view_imageserver"
+        "codename": "view_allowedmetadata"
     }
 },
 {
     "model": "auth.permission",
     "pk": 70,
     "fields": {
-        "name": "Can add allowed meta data",
+        "name": "Can add classification",
         "content_type": 19,
-        "codename": "add_allowedmetadata"
+        "codename": "add_classification"
     }
 },
 {
     "model": "auth.permission",
     "pk": 71,
     "fields": {
-        "name": "Can change allowed meta data",
+        "name": "Can change classification",
         "content_type": 19,
-        "codename": "change_allowedmetadata"
+        "codename": "change_classification"
     }
 },
 {
     "model": "auth.permission",
     "pk": 72,
     "fields": {
-        "name": "Can delete allowed meta data",
+        "name": "Can delete classification",
         "content_type": 19,
-        "codename": "delete_allowedmetadata"
+        "codename": "delete_classification"
     }
 },
 {
     "model": "auth.permission",
     "pk": 73,
     "fields": {
-        "name": "Can view allowed meta data",
+        "name": "Can view classification",
         "content_type": 19,
-        "codename": "view_allowedmetadata"
+        "codename": "view_classification"
     }
 },
 {
     "model": "auth.permission",
     "pk": 74,
     "fields": {
-        "name": "Can add classification",
+        "name": "Can add corpus",
         "content_type": 20,
-        "codename": "add_classification"
+        "codename": "add_corpus"
     }
 },
 {
     "model": "auth.permission",
     "pk": 75,
     "fields": {
-        "name": "Can change classification",
+        "name": "Can change corpus",
         "content_type": 20,
-        "codename": "change_classification"
+        "codename": "change_corpus"
     }
 },
 {
     "model": "auth.permission",
     "pk": 76,
     "fields": {
-        "name": "Can delete classification",
+        "name": "Can delete corpus",
         "content_type": 20,
-        "codename": "delete_classification"
+        "codename": "delete_corpus"
     }
 },
 {
     "model": "auth.permission",
     "pk": 77,
     "fields": {
-        "name": "Can view classification",
+        "name": "Can view corpus",
         "content_type": 20,
-        "codename": "view_classification"
+        "codename": "view_corpus"
     }
 },
 {
     "model": "auth.permission",
     "pk": 78,
     "fields": {
-        "name": "Can add corpus",
+        "name": "Can add corpus export",
         "content_type": 21,
-        "codename": "add_corpus"
+        "codename": "add_corpusexport"
     }
 },
 {
     "model": "auth.permission",
     "pk": 79,
     "fields": {
-        "name": "Can change corpus",
+        "name": "Can change corpus export",
         "content_type": 21,
-        "codename": "change_corpus"
+        "codename": "change_corpusexport"
     }
 },
 {
     "model": "auth.permission",
     "pk": 80,
     "fields": {
-        "name": "Can delete corpus",
+        "name": "Can delete corpus export",
         "content_type": 21,
-        "codename": "delete_corpus"
+        "codename": "delete_corpusexport"
     }
 },
 {
     "model": "auth.permission",
     "pk": 81,
     "fields": {
-        "name": "Can view corpus",
+        "name": "Can view corpus export",
         "content_type": 21,
-        "codename": "view_corpus"
+        "codename": "view_corpusexport"
     }
 },
 {
     "model": "auth.permission",
     "pk": 82,
     "fields": {
-        "name": "Can add corpus export",
+        "name": "Can add element",
         "content_type": 22,
-        "codename": "add_corpusexport"
+        "codename": "add_element"
     }
 },
 {
     "model": "auth.permission",
     "pk": 83,
     "fields": {
-        "name": "Can change corpus export",
+        "name": "Can change element",
         "content_type": 22,
-        "codename": "change_corpusexport"
+        "codename": "change_element"
     }
 },
 {
     "model": "auth.permission",
     "pk": 84,
     "fields": {
-        "name": "Can delete corpus export",
+        "name": "Can delete element",
         "content_type": 22,
-        "codename": "delete_corpusexport"
+        "codename": "delete_element"
     }
 },
 {
     "model": "auth.permission",
     "pk": 85,
     "fields": {
-        "name": "Can view corpus export",
+        "name": "Can view element",
         "content_type": 22,
-        "codename": "view_corpusexport"
+        "codename": "view_element"
     }
 },
 {
     "model": "auth.permission",
     "pk": 86,
     "fields": {
-        "name": "Can add element",
+        "name": "Can add element path",
         "content_type": 23,
-        "codename": "add_element"
+        "codename": "add_elementpath"
     }
 },
 {
     "model": "auth.permission",
     "pk": 87,
     "fields": {
-        "name": "Can change element",
+        "name": "Can change element path",
         "content_type": 23,
-        "codename": "change_element"
+        "codename": "change_elementpath"
     }
 },
 {
     "model": "auth.permission",
     "pk": 88,
     "fields": {
-        "name": "Can delete element",
+        "name": "Can delete element path",
         "content_type": 23,
-        "codename": "delete_element"
-    }
-},
-{
-    "model": "auth.permission",
-    "pk": 89,
-    "fields": {
-        "name": "Can view element",
-        "content_type": 23,
-        "codename": "view_element"
-    }
-},
-{
-    "model": "auth.permission",
-    "pk": 90,
-    "fields": {
-        "name": "Can add element path",
-        "content_type": 24,
-        "codename": "add_elementpath"
-    }
-},
-{
-    "model": "auth.permission",
-    "pk": 91,
-    "fields": {
-        "name": "Can change element path",
-        "content_type": 24,
-        "codename": "change_elementpath"
-    }
-},
-{
-    "model": "auth.permission",
-    "pk": 92,
-    "fields": {
-        "name": "Can delete element path",
-        "content_type": 24,
         "codename": "delete_elementpath"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 93,
+    "pk": 89,
     "fields": {
         "name": "Can view element path",
-        "content_type": 24,
+        "content_type": 23,
         "codename": "view_elementpath"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 94,
+    "pk": 90,
     "fields": {
         "name": "Can add element type",
-        "content_type": 25,
+        "content_type": 24,
         "codename": "add_elementtype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 95,
+    "pk": 91,
     "fields": {
         "name": "Can change element type",
-        "content_type": 25,
+        "content_type": 24,
         "codename": "change_elementtype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 96,
+    "pk": 92,
     "fields": {
         "name": "Can delete element type",
-        "content_type": 25,
+        "content_type": 24,
         "codename": "delete_elementtype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 97,
+    "pk": 93,
     "fields": {
         "name": "Can view element type",
-        "content_type": 25,
+        "content_type": 24,
         "codename": "view_elementtype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 98,
+    "pk": 94,
     "fields": {
         "name": "Can add entity",
-        "content_type": 26,
+        "content_type": 25,
         "codename": "add_entity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 99,
+    "pk": 95,
     "fields": {
         "name": "Can change entity",
-        "content_type": 26,
+        "content_type": 25,
         "codename": "change_entity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 100,
+    "pk": 96,
     "fields": {
         "name": "Can delete entity",
-        "content_type": 26,
+        "content_type": 25,
         "codename": "delete_entity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 101,
+    "pk": 97,
     "fields": {
         "name": "Can view entity",
-        "content_type": 26,
+        "content_type": 25,
         "codename": "view_entity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 102,
+    "pk": 98,
     "fields": {
         "name": "Can add entity link",
-        "content_type": 27,
+        "content_type": 26,
         "codename": "add_entitylink"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 103,
+    "pk": 99,
     "fields": {
         "name": "Can change entity link",
-        "content_type": 27,
+        "content_type": 26,
         "codename": "change_entitylink"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 104,
+    "pk": 100,
     "fields": {
         "name": "Can delete entity link",
-        "content_type": 27,
+        "content_type": 26,
         "codename": "delete_entitylink"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 105,
+    "pk": 101,
     "fields": {
         "name": "Can view entity link",
-        "content_type": 27,
+        "content_type": 26,
         "codename": "view_entitylink"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 106,
+    "pk": 102,
     "fields": {
         "name": "Can add entity role",
-        "content_type": 28,
+        "content_type": 27,
         "codename": "add_entityrole"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 107,
+    "pk": 103,
     "fields": {
         "name": "Can change entity role",
-        "content_type": 28,
+        "content_type": 27,
         "codename": "change_entityrole"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 108,
+    "pk": 104,
     "fields": {
         "name": "Can delete entity role",
-        "content_type": 28,
+        "content_type": 27,
         "codename": "delete_entityrole"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 109,
+    "pk": 105,
     "fields": {
         "name": "Can view entity role",
-        "content_type": 28,
+        "content_type": 27,
         "codename": "view_entityrole"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 110,
+    "pk": 106,
     "fields": {
         "name": "Can add entity type",
-        "content_type": 29,
+        "content_type": 28,
         "codename": "add_entitytype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 111,
+    "pk": 107,
     "fields": {
         "name": "Can change entity type",
-        "content_type": 29,
+        "content_type": 28,
         "codename": "change_entitytype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 112,
+    "pk": 108,
     "fields": {
         "name": "Can delete entity type",
-        "content_type": 29,
+        "content_type": 28,
         "codename": "delete_entitytype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 113,
+    "pk": 109,
     "fields": {
         "name": "Can view entity type",
-        "content_type": 29,
+        "content_type": 28,
         "codename": "view_entitytype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 114,
+    "pk": 110,
     "fields": {
         "name": "Can add meta data",
-        "content_type": 30,
+        "content_type": 29,
         "codename": "add_metadata"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 115,
+    "pk": 111,
     "fields": {
         "name": "Can change meta data",
-        "content_type": 30,
+        "content_type": 29,
         "codename": "change_metadata"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 116,
+    "pk": 112,
     "fields": {
         "name": "Can delete meta data",
-        "content_type": 30,
+        "content_type": 29,
         "codename": "delete_metadata"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 117,
+    "pk": 113,
     "fields": {
         "name": "Can view meta data",
-        "content_type": 30,
+        "content_type": 29,
         "codename": "view_metadata"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 118,
+    "pk": 114,
     "fields": {
         "name": "Can add ml class",
-        "content_type": 31,
+        "content_type": 30,
         "codename": "add_mlclass"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 119,
+    "pk": 115,
     "fields": {
         "name": "Can change ml class",
-        "content_type": 31,
+        "content_type": 30,
         "codename": "change_mlclass"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 120,
+    "pk": 116,
     "fields": {
         "name": "Can delete ml class",
-        "content_type": 31,
+        "content_type": 30,
         "codename": "delete_mlclass"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 121,
+    "pk": 117,
     "fields": {
         "name": "Can view ml class",
-        "content_type": 31,
+        "content_type": 30,
         "codename": "view_mlclass"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 122,
+    "pk": 118,
     "fields": {
         "name": "Can add selection",
-        "content_type": 32,
+        "content_type": 31,
         "codename": "add_selection"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 123,
+    "pk": 119,
     "fields": {
         "name": "Can change selection",
-        "content_type": 32,
+        "content_type": 31,
         "codename": "change_selection"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 124,
+    "pk": 120,
     "fields": {
         "name": "Can delete selection",
-        "content_type": 32,
+        "content_type": 31,
         "codename": "delete_selection"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 125,
+    "pk": 121,
     "fields": {
         "name": "Can view selection",
-        "content_type": 32,
+        "content_type": 31,
         "codename": "view_selection"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 126,
+    "pk": 122,
     "fields": {
         "name": "Can add transcription",
-        "content_type": 33,
+        "content_type": 32,
         "codename": "add_transcription"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 127,
+    "pk": 123,
     "fields": {
         "name": "Can change transcription",
-        "content_type": 33,
+        "content_type": 32,
         "codename": "change_transcription"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 128,
+    "pk": 124,
     "fields": {
         "name": "Can delete transcription",
-        "content_type": 33,
+        "content_type": 32,
         "codename": "delete_transcription"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 129,
+    "pk": 125,
     "fields": {
         "name": "Can view transcription",
-        "content_type": 33,
+        "content_type": 32,
         "codename": "view_transcription"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 130,
+    "pk": 126,
     "fields": {
         "name": "Can add transcription entity",
-        "content_type": 34,
+        "content_type": 33,
         "codename": "add_transcriptionentity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 131,
+    "pk": 127,
     "fields": {
         "name": "Can change transcription entity",
-        "content_type": 34,
+        "content_type": 33,
         "codename": "change_transcriptionentity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 132,
+    "pk": 128,
     "fields": {
         "name": "Can delete transcription entity",
-        "content_type": 34,
+        "content_type": 33,
         "codename": "delete_transcriptionentity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 133,
+    "pk": 129,
     "fields": {
         "name": "Can view transcription entity",
-        "content_type": 34,
+        "content_type": 33,
         "codename": "view_transcriptionentity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 134,
+    "pk": 130,
     "fields": {
         "name": "Can add user",
-        "content_type": 35,
+        "content_type": 34,
         "codename": "add_user"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 135,
+    "pk": 131,
     "fields": {
         "name": "Can change user",
-        "content_type": 35,
+        "content_type": 34,
         "codename": "change_user"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 136,
+    "pk": 132,
     "fields": {
         "name": "Can delete user",
-        "content_type": 35,
+        "content_type": 34,
         "codename": "delete_user"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 137,
+    "pk": 133,
     "fields": {
         "name": "Can view user",
-        "content_type": 35,
+        "content_type": 34,
         "codename": "view_user"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 138,
+    "pk": 134,
     "fields": {
         "name": "Can add group",
-        "content_type": 36,
+        "content_type": 35,
         "codename": "add_group"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 139,
+    "pk": 135,
     "fields": {
         "name": "Can change group",
-        "content_type": 36,
+        "content_type": 35,
         "codename": "change_group"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 140,
+    "pk": 136,
     "fields": {
         "name": "Can delete group",
-        "content_type": 36,
+        "content_type": 35,
         "codename": "delete_group"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 141,
+    "pk": 137,
     "fields": {
         "name": "Can view group",
-        "content_type": 36,
+        "content_type": 35,
         "codename": "view_group"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 142,
+    "pk": 138,
     "fields": {
         "name": "Can add right",
-        "content_type": 37,
+        "content_type": 36,
         "codename": "add_right"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 143,
+    "pk": 139,
     "fields": {
         "name": "Can change right",
-        "content_type": 37,
+        "content_type": 36,
         "codename": "change_right"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 144,
+    "pk": 140,
     "fields": {
         "name": "Can delete right",
-        "content_type": 37,
+        "content_type": 36,
         "codename": "delete_right"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 145,
+    "pk": 141,
     "fields": {
         "name": "Can view right",
-        "content_type": 37,
+        "content_type": 36,
         "codename": "view_right"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 146,
+    "pk": 142,
     "fields": {
         "name": "Can add OAuth credentials",
-        "content_type": 38,
+        "content_type": 37,
         "codename": "add_oauthcredentials"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 147,
+    "pk": 143,
     "fields": {
         "name": "Can change OAuth credentials",
-        "content_type": 38,
+        "content_type": 37,
         "codename": "change_oauthcredentials"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 148,
+    "pk": 144,
     "fields": {
         "name": "Can delete OAuth credentials",
-        "content_type": 38,
+        "content_type": 37,
         "codename": "delete_oauthcredentials"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 149,
+    "pk": 145,
     "fields": {
         "name": "Can view OAuth credentials",
-        "content_type": 38,
+        "content_type": 37,
         "codename": "view_oauthcredentials"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 150,
+    "pk": 146,
     "fields": {
         "name": "Can add user scope",
-        "content_type": 39,
+        "content_type": 38,
         "codename": "add_userscope"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 151,
+    "pk": 147,
     "fields": {
         "name": "Can change user scope",
-        "content_type": 39,
+        "content_type": 38,
         "codename": "change_userscope"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 152,
+    "pk": 148,
     "fields": {
         "name": "Can delete user scope",
-        "content_type": 39,
+        "content_type": 38,
         "codename": "delete_userscope"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 153,
+    "pk": 149,
     "fields": {
         "name": "Can view user scope",
-        "content_type": 39,
+        "content_type": 38,
         "codename": "view_userscope"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 154,
+    "pk": 150,
     "fields": {
         "name": "Can add corpus worker version",
-        "content_type": 40,
+        "content_type": 39,
         "codename": "add_corpusworkerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 155,
+    "pk": 151,
     "fields": {
         "name": "Can change corpus worker version",
-        "content_type": 40,
+        "content_type": 39,
         "codename": "change_corpusworkerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 156,
+    "pk": 152,
     "fields": {
         "name": "Can delete corpus worker version",
-        "content_type": 40,
+        "content_type": 39,
         "codename": "delete_corpusworkerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 157,
+    "pk": 153,
     "fields": {
         "name": "Can view corpus worker version",
-        "content_type": 40,
+        "content_type": 39,
         "codename": "view_corpusworkerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 158,
+    "pk": 154,
     "fields": {
         "name": "Can add data file",
-        "content_type": 41,
+        "content_type": 40,
         "codename": "add_datafile"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 159,
+    "pk": 155,
     "fields": {
         "name": "Can change data file",
-        "content_type": 41,
+        "content_type": 40,
         "codename": "change_datafile"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 160,
+    "pk": 156,
     "fields": {
         "name": "Can delete data file",
-        "content_type": 41,
+        "content_type": 40,
         "codename": "delete_datafile"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 161,
+    "pk": 157,
     "fields": {
         "name": "Can view data file",
-        "content_type": 41,
+        "content_type": 40,
         "codename": "view_datafile"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 162,
+    "pk": 158,
     "fields": {
         "name": "Can add git ref",
-        "content_type": 42,
+        "content_type": 41,
         "codename": "add_gitref"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 163,
+    "pk": 159,
     "fields": {
         "name": "Can change git ref",
-        "content_type": 42,
+        "content_type": 41,
         "codename": "change_gitref"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 164,
+    "pk": 160,
     "fields": {
         "name": "Can delete git ref",
-        "content_type": 42,
+        "content_type": 41,
         "codename": "delete_gitref"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 165,
+    "pk": 161,
     "fields": {
         "name": "Can view git ref",
-        "content_type": 42,
+        "content_type": 41,
         "codename": "view_gitref"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 166,
+    "pk": 162,
     "fields": {
         "name": "Can add process",
-        "content_type": 43,
+        "content_type": 42,
         "codename": "add_process"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 167,
+    "pk": 163,
     "fields": {
         "name": "Can change process",
-        "content_type": 43,
+        "content_type": 42,
         "codename": "change_process"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 168,
+    "pk": 164,
     "fields": {
         "name": "Can delete process",
-        "content_type": 43,
+        "content_type": 42,
         "codename": "delete_process"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 169,
+    "pk": 165,
     "fields": {
         "name": "Can view process",
-        "content_type": 43,
+        "content_type": 42,
         "codename": "view_process"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 170,
+    "pk": 166,
     "fields": {
         "name": "Can add process element",
-        "content_type": 44,
+        "content_type": 43,
         "codename": "add_processelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 171,
+    "pk": 167,
     "fields": {
         "name": "Can change process element",
-        "content_type": 44,
+        "content_type": 43,
         "codename": "change_processelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 172,
+    "pk": 168,
     "fields": {
         "name": "Can delete process element",
-        "content_type": 44,
+        "content_type": 43,
         "codename": "delete_processelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 173,
+    "pk": 169,
     "fields": {
         "name": "Can view process element",
-        "content_type": 44,
+        "content_type": 43,
         "codename": "view_processelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 174,
+    "pk": 170,
     "fields": {
         "name": "Can add repository",
-        "content_type": 45,
+        "content_type": 44,
         "codename": "add_repository"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 175,
+    "pk": 171,
     "fields": {
         "name": "Can change repository",
-        "content_type": 45,
+        "content_type": 44,
         "codename": "change_repository"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 176,
+    "pk": 172,
     "fields": {
         "name": "Can delete repository",
-        "content_type": 45,
+        "content_type": 44,
         "codename": "delete_repository"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 177,
+    "pk": 173,
     "fields": {
         "name": "Can view repository",
-        "content_type": 45,
+        "content_type": 44,
         "codename": "view_repository"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 178,
+    "pk": 174,
     "fields": {
         "name": "Can add revision",
-        "content_type": 46,
+        "content_type": 45,
         "codename": "add_revision"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 179,
+    "pk": 175,
     "fields": {
         "name": "Can change revision",
-        "content_type": 46,
+        "content_type": 45,
         "codename": "change_revision"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 180,
+    "pk": 176,
     "fields": {
         "name": "Can delete revision",
-        "content_type": 46,
+        "content_type": 45,
         "codename": "delete_revision"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 181,
+    "pk": 177,
     "fields": {
         "name": "Can view revision",
-        "content_type": 46,
+        "content_type": 45,
         "codename": "view_revision"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 182,
+    "pk": 178,
     "fields": {
         "name": "Can add worker",
-        "content_type": 47,
+        "content_type": 46,
         "codename": "add_worker"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 183,
+    "pk": 179,
     "fields": {
         "name": "Can change worker",
-        "content_type": 47,
+        "content_type": 46,
         "codename": "change_worker"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 184,
+    "pk": 180,
     "fields": {
         "name": "Can delete worker",
-        "content_type": 47,
+        "content_type": 46,
         "codename": "delete_worker"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 185,
+    "pk": 181,
     "fields": {
         "name": "Can view worker",
-        "content_type": 47,
+        "content_type": 46,
         "codename": "view_worker"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 186,
+    "pk": 182,
     "fields": {
         "name": "Can add worker activity",
-        "content_type": 48,
+        "content_type": 47,
         "codename": "add_workeractivity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 187,
+    "pk": 183,
     "fields": {
         "name": "Can change worker activity",
-        "content_type": 48,
+        "content_type": 47,
         "codename": "change_workeractivity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 188,
+    "pk": 184,
     "fields": {
         "name": "Can delete worker activity",
-        "content_type": 48,
+        "content_type": 47,
         "codename": "delete_workeractivity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 189,
+    "pk": 185,
     "fields": {
         "name": "Can view worker activity",
-        "content_type": 48,
+        "content_type": 47,
         "codename": "view_workeractivity"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 190,
+    "pk": 186,
     "fields": {
         "name": "Can add worker configuration",
-        "content_type": 49,
+        "content_type": 48,
         "codename": "add_workerconfiguration"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 191,
+    "pk": 187,
     "fields": {
         "name": "Can change worker configuration",
-        "content_type": 49,
+        "content_type": 48,
         "codename": "change_workerconfiguration"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 192,
+    "pk": 188,
     "fields": {
         "name": "Can delete worker configuration",
-        "content_type": 49,
+        "content_type": 48,
         "codename": "delete_workerconfiguration"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 193,
+    "pk": 189,
     "fields": {
         "name": "Can view worker configuration",
-        "content_type": 49,
+        "content_type": 48,
         "codename": "view_workerconfiguration"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 194,
+    "pk": 190,
     "fields": {
         "name": "Can add worker type",
-        "content_type": 50,
+        "content_type": 49,
         "codename": "add_workertype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 195,
+    "pk": 191,
     "fields": {
         "name": "Can change worker type",
-        "content_type": 50,
+        "content_type": 49,
         "codename": "change_workertype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 196,
+    "pk": 192,
     "fields": {
         "name": "Can delete worker type",
-        "content_type": 50,
+        "content_type": 49,
         "codename": "delete_workertype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 197,
+    "pk": 193,
     "fields": {
         "name": "Can view worker type",
-        "content_type": 50,
+        "content_type": 49,
         "codename": "view_workertype"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 198,
+    "pk": 194,
     "fields": {
         "name": "Can add worker version",
-        "content_type": 51,
+        "content_type": 50,
         "codename": "add_workerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 199,
+    "pk": 195,
     "fields": {
         "name": "Can change worker version",
-        "content_type": 51,
+        "content_type": 50,
         "codename": "change_workerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 200,
+    "pk": 196,
     "fields": {
         "name": "Can delete worker version",
-        "content_type": 51,
+        "content_type": 50,
         "codename": "delete_workerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 201,
+    "pk": 197,
     "fields": {
         "name": "Can view worker version",
-        "content_type": 51,
+        "content_type": 50,
         "codename": "view_workerversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 202,
+    "pk": 198,
     "fields": {
         "name": "Can add worker run",
-        "content_type": 52,
+        "content_type": 51,
         "codename": "add_workerrun"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 203,
+    "pk": 199,
     "fields": {
         "name": "Can change worker run",
-        "content_type": 52,
+        "content_type": 51,
         "codename": "change_workerrun"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 204,
+    "pk": 200,
     "fields": {
         "name": "Can delete worker run",
-        "content_type": 52,
+        "content_type": 51,
         "codename": "delete_workerrun"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 205,
+    "pk": 201,
     "fields": {
         "name": "Can view worker run",
-        "content_type": 52,
+        "content_type": 51,
         "codename": "view_workerrun"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 206,
+    "pk": 202,
     "fields": {
         "name": "Can add dataset",
-        "content_type": 53,
+        "content_type": 52,
         "codename": "add_dataset"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 207,
+    "pk": 203,
     "fields": {
         "name": "Can change dataset",
-        "content_type": 53,
+        "content_type": 52,
         "codename": "change_dataset"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 208,
+    "pk": 204,
     "fields": {
         "name": "Can delete dataset",
-        "content_type": 53,
+        "content_type": 52,
         "codename": "delete_dataset"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 209,
+    "pk": 205,
     "fields": {
         "name": "Can view dataset",
-        "content_type": 53,
+        "content_type": 52,
         "codename": "view_dataset"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 210,
+    "pk": 206,
     "fields": {
         "name": "Can add metric key",
-        "content_type": 54,
+        "content_type": 53,
         "codename": "add_metrickey"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 211,
+    "pk": 207,
     "fields": {
         "name": "Can change metric key",
-        "content_type": 54,
+        "content_type": 53,
         "codename": "change_metrickey"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 212,
+    "pk": 208,
     "fields": {
         "name": "Can delete metric key",
-        "content_type": 54,
+        "content_type": 53,
         "codename": "delete_metrickey"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 213,
+    "pk": 209,
     "fields": {
         "name": "Can view metric key",
-        "content_type": 54,
+        "content_type": 53,
         "codename": "view_metrickey"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 214,
+    "pk": 210,
     "fields": {
         "name": "Can add model",
-        "content_type": 55,
+        "content_type": 54,
         "codename": "add_model"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 215,
+    "pk": 211,
     "fields": {
         "name": "Can change model",
-        "content_type": 55,
+        "content_type": 54,
         "codename": "change_model"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 216,
+    "pk": 212,
     "fields": {
         "name": "Can delete model",
-        "content_type": 55,
+        "content_type": 54,
         "codename": "delete_model"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 217,
+    "pk": 213,
     "fields": {
         "name": "Can view model",
-        "content_type": 55,
+        "content_type": 54,
         "codename": "view_model"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 218,
+    "pk": 214,
     "fields": {
         "name": "Can add model version",
-        "content_type": 56,
+        "content_type": 55,
         "codename": "add_modelversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 219,
+    "pk": 215,
     "fields": {
         "name": "Can change model version",
-        "content_type": 56,
+        "content_type": 55,
         "codename": "change_modelversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 220,
+    "pk": 216,
     "fields": {
         "name": "Can delete model version",
-        "content_type": 56,
+        "content_type": 55,
         "codename": "delete_modelversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 221,
+    "pk": 217,
     "fields": {
         "name": "Can view model version",
-        "content_type": 56,
+        "content_type": 55,
         "codename": "view_modelversion"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 222,
+    "pk": 218,
     "fields": {
         "name": "Can add metric value",
-        "content_type": 57,
+        "content_type": 56,
         "codename": "add_metricvalue"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 223,
+    "pk": 219,
     "fields": {
         "name": "Can change metric value",
-        "content_type": 57,
+        "content_type": 56,
         "codename": "change_metricvalue"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 224,
+    "pk": 220,
     "fields": {
         "name": "Can delete metric value",
-        "content_type": 57,
+        "content_type": 56,
         "codename": "delete_metricvalue"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 225,
+    "pk": 221,
     "fields": {
         "name": "Can view metric value",
-        "content_type": 57,
+        "content_type": 56,
         "codename": "view_metricvalue"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 226,
+    "pk": 222,
     "fields": {
         "name": "Can add dataset element",
-        "content_type": 58,
+        "content_type": 57,
         "codename": "add_datasetelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 227,
+    "pk": 223,
     "fields": {
         "name": "Can change dataset element",
-        "content_type": 58,
+        "content_type": 57,
         "codename": "change_datasetelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 228,
+    "pk": 224,
     "fields": {
         "name": "Can delete dataset element",
-        "content_type": 58,
+        "content_type": 57,
         "codename": "delete_datasetelement"
     }
 },
 {
     "model": "auth.permission",
-    "pk": 229,
+    "pk": 225,
     "fields": {
         "name": "Can view dataset element",
-        "content_type": 58,
+        "content_type": 57,
         "codename": "view_datasetelement"
     }
 },
 {
     "model": "ponos.farm",
-    "pk": "fb8128f6-fb4a-49ee-8616-f049f305282c",
+    "pk": "2baf9bee-0b5e-479d-bf68-c0cea79b025f",
     "fields": {
-        "name": "Wheat farm",
-        "seed": "a492fd01413d1bce9fd927ff545d73a7598bdb94f8f8d27eca711c17b6da0205"
+        "name": "Default farm",
+        "seed": "3c8d69afdb528e7f5753f8d122cb613d9b797ae47cb791f6b0f3a549abde7da0"
     }
 },
 {
-    "model": "ponos.workflow",
-    "pk": "f54a3eb8-dd66-49c0-aca7-565f45ab2c5c",
+    "model": "ponos.farm",
+    "pk": "6dfc651f-c7c2-4e50-9306-c9de3ceeb7b4",
     "fields": {
-        "created": "2020-02-02T01:23:45.678Z",
-        "updated": "2020-02-02T01:23:45.678Z",
-        "finished": null,
-        "farm": "fb8128f6-fb4a-49ee-8616-f049f305282c"
+        "name": "Wheat farm",
+        "seed": "dee49256eee2264043b0e96b650436a571710ce346b106dde8a1e2871a53dac8"
     }
 },
 {
     "model": "ponos.task",
-    "pk": "fd554cf4-464f-49a0-9e18-f06b8d72a391",
+    "pk": "9f5d8a55-bf86-4271-9dcf-eb3b54cbb273",
     "fields": {
         "run": 0,
         "depth": 0,
@@ -3908,21 +3909,21 @@
         "agent": null,
         "requires_gpu": false,
         "gpu": null,
-        "workflow": "f54a3eb8-dd66-49c0-aca7-565f45ab2c5c",
+        "process": "49ca1236-f0ca-4e5c-a733-0172663be1b4",
         "container": null,
         "created": "2020-02-02T01:23:45.678Z",
         "updated": "2020-02-02T01:23:45.678Z",
         "expiry": "2100-12-31T23:59:59.999Z",
         "extra_files": "{}",
-        "token": "AGFSgxF5TMiy/nNilurQSJMVwW+TcERwtqHl7qhW4Tk=",
+        "token": "EFvURBJOSY2pFy2YI+I6+vX3ynfKg00OsQsEl2ulYio=",
         "parents": []
     }
 },
 {
     "model": "ponos.artifact",
-    "pk": "064fed90-4f2b-400a-884e-06c280f41228",
+    "pk": "2612a9e3-8736-4ffb-a172-6e00d5fa19fa",
     "fields": {
-        "task": "fd554cf4-464f-49a0-9e18-f06b8d72a391",
+        "task": "9f5d8a55-bf86-4271-9dcf-eb3b54cbb273",
         "path": "/path/to/docker_build",
         "size": 42000,
         "content_type": "application/octet-stream",
diff --git a/arkindex/documents/management/commands/build_fixtures.py b/arkindex/documents/management/commands/build_fixtures.py
index b493c11e77..d4e0ae2f81 100644
--- a/arkindex/documents/management/commands/build_fixtures.py
+++ b/arkindex/documents/management/commands/build_fixtures.py
@@ -17,7 +17,6 @@ from arkindex.process.models import (
     WorkerVersion,
     WorkerVersionGPUUsage,
     WorkerVersionState,
-    Workflow,
 )
 from arkindex.project.tools import fake_now
 from arkindex.users.models import Group, Right, Role, User
@@ -112,10 +111,19 @@ class Command(BaseCommand):
 
         # Create a fake docker build with a docker image task
         farm = Farm.objects.create(name="Wheat farm")
-        workflow = Workflow.objects.create(farm=farm)
-        # Use an expiry very far away so that task is never expired
-        task_expiry = datetime(2100, 12, 31, 23, 59, 59, 999999, timezone.utc)
-        build_task = workflow.tasks.create(run=0, depth=0, slug='docker_build', state=State.Completed, expiry=task_expiry)
+        build_process = Process.objects.create(
+            farm=farm,
+            creator=superuser,
+            mode=ProcessMode.Repository,
+        )
+        build_task = build_process.tasks.create(
+            run=0,
+            depth=0,
+            slug='docker_build',
+            state=State.Completed,
+            # Use an expiry very far away so that the task never expires
+            expiry=datetime(2100, 12, 31, 23, 59, 59, 999999, timezone.utc),
+        )
         docker_image = build_task.artifacts.create(size=42_000, path='/path/to/docker_build')
 
         # Create some workers for the repository with their available version
diff --git a/arkindex/documents/management/commands/cleanup.py b/arkindex/documents/management/commands/cleanup.py
index 69b73625e3..7119b9852e 100644
--- a/arkindex/documents/management/commands/cleanup.py
+++ b/arkindex/documents/management/commands/cleanup.py
@@ -13,30 +13,28 @@ from rq.utils import as_text
 
 from arkindex.documents.models import CorpusExport, CorpusExportState, Element
 from arkindex.images.models import Image, ImageServer
-from arkindex.ponos.models import Artifact, Task, Workflow
-from arkindex.process.models import DataFile, GitRef, GitRefType, WorkerVersion, WorkerVersionState
+from arkindex.ponos.models import Artifact, Task
+from arkindex.process.models import DataFile, GitRef, GitRefType, Process, WorkerVersion, WorkerVersionState
 from arkindex.project.aws import s3
 from arkindex.project.rq_overrides import Job
 from arkindex.training.models import ModelVersion
 from redis.exceptions import ConnectionError
 
-# Ponos artifacts use the path: <workflow uuid>/<task id>/<path>
-# Before June 2020, artifacts used <workflow uuid>/run_<run id>/<task id>.tar.zst
-REGEX_ARTIFACT = re.compile(r'^(?P<workflow_id>[0-9a-f\-]{36})/(?P<task_id>[0-9a-f\-]{36})/')
-REGEX_OLD_ARTIFACT = re.compile(r'^(?P<workflow_id>[0-9a-f\-]{36})/run_(?P<run_id>[0-9]+)/(?P<task_id>[0-9a-f\-]{36})\.tar\.zst$')
-# Ponos logs use the path: <workflow uuid>/run_<run id>/<task id>.log
-REGEX_LOG = re.compile(r'^(?P<workflow_id>[0-9a-f\-]{36})/run_(?P<run_id>[0-9]+)/(?P<task_id>[0-9a-f\-]{36})\.log$')
+# Ponos artifacts use the path: <task id>/<path>
+REGEX_ARTIFACT = re.compile(r'^(?P<task_id>[0-9a-f\-]{36})/')
+# Ponos logs use the path: <task id>.log
+REGEX_LOG = re.compile(r'^(?P<task_id>[0-9a-f\-]{36})\.log$')
 # Model version artifacts use the path: <modelversion uuid>.zst
 REGEX_MODEL_VERSION = re.compile(r'^(?P<modelversion_id>[0-9a-f\-]{36})\.zst$')
 
 
 class Command(BaseCommand):
-    help = 'Clean up old corpus exports, trashed DataFiles, expired Ponos workflows and S3 buckets'
+    help = 'Clean up old corpus exports, trashed DataFiles, expired processes and S3 buckets'
 
     def handle(self, *args, **options):
         self.cleanup_artifacts()
 
-        self.cleanup_expired_workflows()
+        self.cleanup_expired_processes()
 
         self.cleanup_old_exports()
 
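As a minimal sketch of how the new, flatter key layout matches the patterns above (the UUID and paths are made up for illustration; this snippet is not part of the patched module):

    import re

    # Same patterns as in the module: artifact keys now start with the task ID
    # alone, and each log is a flat '<task id>.log' object.
    REGEX_ARTIFACT = re.compile(r'^(?P<task_id>[0-9a-f\-]{36})/')
    REGEX_LOG = re.compile(r'^(?P<task_id>[0-9a-f\-]{36})\.log$')

    task_id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'

    # An artifact key keeps the artifact path after the task ID
    assert REGEX_ARTIFACT.match(f'{task_id}/path/to/thing.txt').group('task_id') == task_id
    # A log key is just the task ID with a .log suffix
    assert REGEX_LOG.match(f'{task_id}.log').group('task_id') == task_id
    # Old-style '<workflow id>/run_<n>/<task id>.log' keys no longer match
    assert REGEX_LOG.match(f'{task_id}/run_0/{task_id}.log') is None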
@@ -63,26 +61,16 @@ class Command(BaseCommand):
         bucket = s3.Bucket(settings.PONOS_S3_ARTIFACTS_BUCKET)
         for obj in bucket.objects.all():
 
-            # Parse workflow and task
+            # Parse the task ID
             match = REGEX_ARTIFACT.match(obj.key)
             if match is None:
-                old_match = REGEX_OLD_ARTIFACT.match(obj.key)
-                if old_match is None:
-                    self.stdout.write(self.style.WARNING(f"Unsupported artifact {obj.key}"))
-                    continue
-                # If REGEX_OLD_ARTIFACT matches the file, it is an obsolete and unsupported artifact so it
-                # can be deleted without looking for a matching task
-                self.stdout.write(f'Removing obsolete artifact {obj.key}…')
-                try:
-                    obj.delete()
-                except ClientError as e:
-                    self.stdout.write(self.style.ERROR(str(e)))
+                self.stdout.write(self.style.WARNING(f"Unsupported artifact {obj.key}"))
                 continue
 
             # Find matching task
             matched_ids = match.groupdict()
             try:
-                Task.objects.only('id').get(workflow_id=matched_ids['workflow_id'], id=matched_ids['task_id'])
+                Task.objects.only('id').get(id=matched_ids['task_id'])
             except Task.DoesNotExist:
                 # When no task is found, delete the S3 file
                 self.stdout.write(f'Removing artifact {obj.key}…')
@@ -93,9 +81,9 @@ class Command(BaseCommand):
 
         self.stdout.write(self.style.SUCCESS('Successfully cleaned up orphaned Ponos artifacts.'))
 
-    def cleanup_expired_workflows(self):
-        # Keep workflows that built artifacts for WorkerVersions on Git tags or main branches
-        worker_version_docker_image_workflows = (
+    def cleanup_expired_processes(self):
+        # Keep processes that built artifacts for WorkerVersions on Git tags or main branches
+        worker_version_docker_image_processes = (
             GitRef
             .objects
             .filter(
@@ -103,30 +91,30 @@ class Command(BaseCommand):
                 | Q(type=GitRefType.Branch, name__in=('master', 'main'))
             )
             # There might be a revision with no WorkerVersions at all, or a revision with
-            # no WorkerVersions that have a docker_image, which could cause the workflow ID
+            # no WorkerVersions that have a docker_image, which could cause the process ID
             # to be NULL. This query will be used in a NOT IN clause, which would return
-            # FALSE when a workflow is in this subquery, and NULL when it isn't, because
+            # FALSE when a process is in this subquery, and NULL when it isn't, because
             # SQL handles NULL values weirdly. This would cause the parent query to evaluate
-            # a WHERE NULL, which is assumed to be FALSE, so all workflows would be excluded.
+            # a WHERE NULL, which is assumed to be FALSE, so all processes would be excluded.
             #
-            # Excluding NULLs directly with a .exclude(revision__...__workflow_id=None)
+            # Excluding NULLs directly with a .exclude(revision__...__process_id=None)
             # causes the JOINs to be duplicated, so we use an annotation to make sure the
             # ORM understands we are filtering on the column that we are selecting.
-            .annotate(workflow_id=F('revision__versions__docker_image__task__workflow_id'))
-            .exclude(workflow_id=None)
-            .values('workflow_id')
+            .annotate(process_id=F('revision__versions__docker_image__task__process_id'))
+            .exclude(process_id=None)
+            .values('process_id')
         )
 
-        expired_workflows = Workflow \
+        expired_processes = Process \
             .objects \
             .annotate(max_expiry=Max('tasks__expiry')) \
             .filter(max_expiry__lt=timezone.now()) \
-            .exclude(id__in=worker_version_docker_image_workflows)
+            .exclude(id__in=worker_version_docker_image_processes)
 
-        tasks = Task.objects.filter(workflow__in=expired_workflows)
-        artifacts = Artifact.objects.filter(task__workflow__in=expired_workflows)
+        tasks = Task.objects.filter(process__in=expired_processes)
+        artifacts = Artifact.objects.filter(task__process__in=expired_processes)
 
-        self.stdout.write(f'Removing {artifacts.count()} artifacts of expired workflows from S3…')
+        self.stdout.write(f'Removing {artifacts.count()} artifacts of expired processes from S3…')
         for artifact in artifacts.select_related('task').iterator():
             self.stdout.write(f'Removing artifact {artifact.s3.key}')
             try:
@@ -134,8 +122,8 @@ class Command(BaseCommand):
             except ClientError as e:
                 self.stdout.write(self.style.ERROR(str(e)))
 
-        self.stdout.write(f'Removing logs for {tasks.count()} tasks of expired workflows from S3…')
-        for task in tasks.select_related('workflow').iterator():
+        self.stdout.write(f'Removing logs for {tasks.count()} tasks of expired processes from S3…')
+        for task in tasks.iterator():
             self.stdout.write(f'Removing task log {task.s3_logs.key}')
             try:
                 task.s3_logs.delete()
@@ -148,20 +136,15 @@ class Command(BaseCommand):
         self.stdout.write(f'Updating {affected_versions.count()} available worker versions to the Error state…')
         affected_versions.update(state=WorkerVersionState.Error)
 
-        self.stdout.write(f'Removing {artifacts.count()} artifacts of expired workflows…')
+        self.stdout.write(f'Removing {artifacts.count()} artifacts of expired processes…')
         artifacts.delete()
 
-        self.stdout.write(f'Removing {tasks.count()} tasks of expired workflows…')
-        # Deleting the tasks will cause expired_workflows to be empty, since all workflows will lose their expiry.
-        # Storing the workflow IDs in a list here is not perfect,
-        # but is more efficient than Django's very heavy cascade deletion system.
-        workflow_ids = list(expired_workflows.values_list('id', flat=True))
+        self.stdout.write(f'Removing {tasks.count()} tasks of expired processes…')
         tasks.delete()
 
-        self.stdout.write(f'Removing {len(workflow_ids)} expired workflows…')
-        Workflow.objects.filter(id__in=workflow_ids).delete()
-
-        self.stdout.write(self.style.SUCCESS('Successfully cleaned up expired workflows.'))
+        # Expired processes are not deleted themselves, only emptied of their
+        # tasks and artifacts, because deleting them would also remove their
+        # WorkerRuns and WorkerActivities.
+        self.stdout.write(self.style.SUCCESS('Successfully cleaned up expired processes.'))
 
     def cleanup_old_exports(self):
         # Corpus exports that are over 2 weeks old
@@ -289,7 +272,7 @@ class Command(BaseCommand):
         bucket = s3.Bucket(settings.PONOS_S3_LOGS_BUCKET)
         for obj in bucket.objects.all():
 
-            # Parse workflow, run and task ID
+            # Parse the task ID
             match = REGEX_LOG.match(obj.key)
             if match is None:
                 self.stdout.write(self.style.WARNING(f"Unsupported log {obj.key}"))
@@ -298,7 +281,7 @@ class Command(BaseCommand):
             # Find matching task
             matched_ids = match.groupdict()
             try:
-                Task.objects.only('id').get(workflow_id=matched_ids['workflow_id'], id=matched_ids['task_id'], run=matched_ids['run_id'])
+                Task.objects.only('id').get(id=matched_ids['task_id'])
             except Task.DoesNotExist:
                 # When no task is found, delete the S3 file
                 self.stdout.write(f'Removing log {obj.key}…')
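The NOT IN / NULL pitfall that the comment in cleanup_expired_processes describes is easiest to see in plain SQL. A self-contained sketch, using sqlite3 and made-up table names rather than the real Arkindex schema:

    import sqlite3

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE process (id INTEGER)')
    con.execute('CREATE TABLE keep (process_id INTEGER)')
    con.executemany('INSERT INTO process VALUES (?)', [(1,), (2,)])
    # One process to keep, plus a NULL, like a revision whose worker
    # versions have no docker_image
    con.executemany('INSERT INTO keep VALUES (?)', [(1,), (None,)])

    # '2 NOT IN (1, NULL)' evaluates to NULL rather than TRUE, so the WHERE
    # clause rejects every row and nothing is ever considered expired
    rows = con.execute(
        'SELECT id FROM process WHERE id NOT IN (SELECT process_id FROM keep)'
    ).fetchall()
    assert rows == []

    # Excluding the NULLs from the subquery, as the annotate()/exclude()
    # pair above does, restores the expected result
    rows = con.execute(
        'SELECT id FROM process WHERE id NOT IN '
        '(SELECT process_id FROM keep WHERE process_id IS NOT NULL)'
    ).fetchall()
    assert rows == [(2,)]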
diff --git a/arkindex/documents/tasks.py b/arkindex/documents/tasks.py
index 9712f69f41..dee63708e2 100644
--- a/arkindex/documents/tasks.py
+++ b/arkindex/documents/tasks.py
@@ -22,6 +22,7 @@ from arkindex.documents.models import (
     Transcription,
     TranscriptionEntity,
 )
+from arkindex.ponos.models import Task
 from arkindex.process.models import Process, ProcessElement, WorkerActivity, WorkerRun
 from arkindex.users.models import User
 
@@ -72,6 +73,7 @@ def corpus_delete(corpus_id: str) -> None:
         corpus.memberships.all(),
         corpus.exports.all(),
         WorkerRun.objects.filter(process__corpus_id=corpus_id),
+        Task.objects.filter(process__corpus_id=corpus_id),
         corpus.processes.all(),
         Corpus.objects.filter(id=corpus_id),
     ]
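Since tasks now point directly at processes, the Task rows have to be removed before corpus.processes.all(), otherwise deleting the processes would either fail on the foreign key or fall back to Django's slow cascade collector. A minimal sketch of the ordering idea behind this list; delete_in_order is illustrative, not the actual corpus_delete code:

    # Hypothetical helper: the querysets are ordered children-first, so by
    # the time a parent row is deleted nothing references it any more and
    # each .delete() stays a plain, cascade-free DELETE query
    def delete_in_order(querysets):
        for queryset in querysets:
            queryset.delete()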
diff --git a/arkindex/documents/tests/commands/test_cleanup.py b/arkindex/documents/tests/commands/test_cleanup.py
index a994d91d4f..bbbc57bf93 100644
--- a/arkindex/documents/tests/commands/test_cleanup.py
+++ b/arkindex/documents/tests/commands/test_cleanup.py
@@ -10,8 +10,8 @@ from django.test import override_settings
 
 from arkindex.documents.models import CorpusExport, CorpusExportState, Element
 from arkindex.images.models import Image, ImageServer
-from arkindex.ponos.models import Artifact, Farm, Task, Workflow
-from arkindex.process.models import DataFile, GitRefType, Repository, WorkerVersionState
+from arkindex.ponos.models import Artifact, Farm, Task
+from arkindex.process.models import DataFile, GitRefType, Process, ProcessMode, Repository, WorkerVersionState
 from arkindex.project.tests import FixtureTestCase
 from arkindex.training.models import Model, ModelVersion
 
@@ -43,13 +43,12 @@ class TestCleanupCommand(FixtureTestCase):
                 f"""
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 1 old corpus exports from S3…
                 Removing export {done_export.id} from S3…
                 Removing 2 old corpus exports…
@@ -86,13 +85,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -127,13 +125,12 @@ class TestCleanupCommand(FixtureTestCase):
                 f"""
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 1 old corpus exports from S3…
                 Removing export {done_export.id} from S3…
                 Export {done_export.id} not found on S3, skipping
@@ -178,13 +175,12 @@ class TestCleanupCommand(FixtureTestCase):
                 f"""
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 1 old corpus exports from S3…
                 Removing export {done_export.id} from S3…
                 Removing 1 old corpus exports…
@@ -231,13 +227,12 @@ class TestCleanupCommand(FixtureTestCase):
                 f"""
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -291,13 +286,12 @@ class TestCleanupCommand(FixtureTestCase):
                 f"""
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -326,24 +320,27 @@ class TestCleanupCommand(FixtureTestCase):
 
     @patch('arkindex.documents.management.commands.cleanup.s3')
     def test_cleanup_artifacts(self, cleanup_s3_mock, s3_mock, rq_mock):
-        workflow = Workflow.objects.create(farm=Farm.objects.first())
-        task = workflow.tasks.create(run=0, depth=0, slug='task')
+        process = Process.objects.create(
+            farm=Farm.objects.first(),
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
+        task = process.tasks.create(run=0, depth=0, slug='task')
 
         good_s3_artifact = MagicMock()
-        good_s3_artifact.key = f'{workflow.id}/{task.id}/path/to/thing.txt'
+        good_s3_artifact.key = f'{task.id}/path/to/thing.txt'
         orphan_s3_artifact = MagicMock()
-        orphan_s3_artifact.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/sad/artifact.zip'
-        obsolete_artifact = MagicMock()
-        obsolete_artifact.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_0/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.tar.zst'
+        orphan_s3_artifact.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/sad/artifact.zip'
         unsupported_s3_artifact = MagicMock()
         unsupported_s3_artifact.key = 'cant_touch_this.txt.vbs'
         broken_s3_artifact = MagicMock()
-        broken_s3_artifact.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/nope.zip'
+        broken_s3_artifact.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/nope.zip'
         broken_s3_artifact.delete.side_effect = ClientError({'Error': {'Code': '500'}}, 'delete_object')
 
         cleanup_s3_mock.Bucket.return_value.objects.all.side_effect = [
             # Bucket for Ponos artifacts
-            [good_s3_artifact, orphan_s3_artifact, obsolete_artifact, unsupported_s3_artifact, broken_s3_artifact],
+            [good_s3_artifact, orphan_s3_artifact, unsupported_s3_artifact, broken_s3_artifact],
             # Bucket for corpus exports
             [],
             # Bucket for IIIF images
@@ -359,19 +356,17 @@ class TestCleanupCommand(FixtureTestCase):
             dedent(
                 """
                 Removing orphaned Ponos artifacts…
-                Removing artifact aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/sad/artifact.zip…
-                Removing obsolete artifact aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_0/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.tar.zst…
+                Removing artifact aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/sad/artifact.zip…
                 Unsupported artifact cant_touch_this.txt.vbs
-                Removing artifact aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/nope.zip…
+                Removing artifact aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/nope.zip…
                 An error occurred (500) when calling the delete_object operation: Unknown
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -407,11 +402,15 @@ class TestCleanupCommand(FixtureTestCase):
         self.assertEqual(broken_s3_artifact.delete.call_count, 1)
 
     @patch('arkindex.ponos.models.s3')
-    def test_cleanup_expired_workflows(self, ponos_s3_mock, s3_mock, rq_mock):
+    def test_cleanup_expired_processes(self, ponos_s3_mock, s3_mock, rq_mock):
         farm = Farm.objects.first()
 
-        expired_workflow = farm.workflows.create()
-        expired_task = expired_workflow.tasks.create(
+        expired_process = farm.processes.create(
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
+        expired_task = expired_process.tasks.create(
             run=0,
             depth=0,
             slug='task',
@@ -419,8 +418,12 @@ class TestCleanupCommand(FixtureTestCase):
         )
         expired_artifact = expired_task.artifacts.create(path='nope.txt', size=256)
 
-        non_expired_workflow = farm.workflows.create()
-        non_expired_task = non_expired_workflow.tasks.create(
+        non_expired_process = farm.processes.create(
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
+        non_expired_task = non_expired_process.tasks.create(
             run=0,
             depth=0,
             slug='task',
@@ -429,8 +432,12 @@ class TestCleanupCommand(FixtureTestCase):
         )
         non_expired_artifact = non_expired_task.artifacts.create(path='artsy-fact', size=1337)
 
-        # A workflow with no tasks at all never expires
-        empty_workflow = farm.workflows.create()
+        # A process with no tasks at all never expires
+        empty_process = farm.processes.create(
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
 
         ponos_s3_mock.Object().key = 's3_key'
 
@@ -440,15 +447,14 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 1 artifacts of expired workflows from S3…
+                Removing 1 artifacts of expired processes from S3…
                 Removing artifact s3_key
-                Removing logs for 1 tasks of expired workflows from S3…
+                Removing logs for 1 tasks of expired processes from S3…
                 Removing task log s3_key
                 Updating 0 available worker versions to the Error state…
-                Removing 1 artifacts of expired workflows…
-                Removing 1 tasks of expired workflows…
-                Removing 1 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 1 artifacts of expired processes…
+                Removing 1 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -470,27 +476,31 @@ class TestCleanupCommand(FixtureTestCase):
             ).strip()
         )
 
-        with self.assertRaises(Workflow.DoesNotExist):
-            expired_workflow.refresh_from_db()
         with self.assertRaises(Task.DoesNotExist):
             expired_task.refresh_from_db()
         with self.assertRaises(Artifact.DoesNotExist):
             expired_artifact.refresh_from_db()
 
         # Those still exist, refreshing works
-        non_expired_workflow.refresh_from_db()
+        expired_process.refresh_from_db()
+        non_expired_process.refresh_from_db()
         non_expired_task.refresh_from_db()
         non_expired_artifact.refresh_from_db()
-        empty_workflow.refresh_from_db()
+        empty_process.refresh_from_db()
 
         self.assertEqual(ponos_s3_mock.Object().delete.call_count, 2)
 
     def _make_revision_artifact(self):
         """
-        Create an artifact on an expired workflow and assign it to a worker version.
+        Create an artifact on an expired process and assign it to a worker version.
         """
-        workflow = Workflow.objects.create(farm=Farm.objects.first())
-        task = workflow.tasks.create(
+        process = Process.objects.create(
+            farm=Farm.objects.first(),
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
+        task = process.tasks.create(
             run=0,
             depth=0,
             slug='task',
@@ -514,9 +524,9 @@ class TestCleanupCommand(FixtureTestCase):
         return revision, artifact
 
     @patch('arkindex.ponos.models.s3')
-    def test_cleanup_expired_workflows_docker_images(self, ponos_s3_mock, s3_mock, rq_mock):
+    def test_cleanup_expired_processes_docker_images(self, ponos_s3_mock, s3_mock, rq_mock):
         """
-        Artifacts used as Docker images for worker versions from expired workflows
+        Artifacts used as Docker images for worker versions from expired processes
         should only be deleted if the versions are neither on Git tags nor on main branches.
         """
         lonely_revision, lonely_artifact = self._make_revision_artifact()
@@ -539,17 +549,16 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 2 artifacts of expired workflows from S3…
+                Removing 2 artifacts of expired processes from S3…
                 Removing artifact s3_key
                 Removing artifact s3_key
-                Removing logs for 2 tasks of expired workflows from S3…
+                Removing logs for 2 tasks of expired processes from S3…
                 Removing task log s3_key
                 Removing task log s3_key
                 Updating 2 available worker versions to the Error state…
-                Removing 2 artifacts of expired workflows…
-                Removing 2 tasks of expired workflows…
-                Removing 2 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 2 artifacts of expired processes…
+                Removing 2 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -575,16 +584,14 @@ class TestCleanupCommand(FixtureTestCase):
             lonely_artifact.refresh_from_db()
         with self.assertRaises(Task.DoesNotExist):
             lonely_artifact.task.refresh_from_db()
-        with self.assertRaises(Workflow.DoesNotExist):
-            lonely_artifact.task.workflow.refresh_from_db()
         with self.assertRaises(Artifact.DoesNotExist):
             branch_artifact.refresh_from_db()
         with self.assertRaises(Task.DoesNotExist):
             branch_artifact.task.refresh_from_db()
-        with self.assertRaises(Workflow.DoesNotExist):
-            branch_artifact.task.workflow.refresh_from_db()
 
         # Those still exist, refreshing works
+        lonely_artifact.task.process.refresh_from_db()
+        branch_artifact.task.process.refresh_from_db()
         master_artifact.refresh_from_db()
         main_artifact.refresh_from_db()
         tagged_artifact.refresh_from_db()
@@ -597,13 +604,13 @@ class TestCleanupCommand(FixtureTestCase):
         self.assertEqual(ponos_s3_mock.Object().delete.call_count, 4)
 
     @patch('arkindex.ponos.models.s3')
-    def test_cleanup_expired_workflows_null(self, ponos_s3_mock, s3_mock, rq_mock):
+    def test_cleanup_expired_processes_null(self, ponos_s3_mock, s3_mock, rq_mock):
         repo = Repository.objects.get(url='http://my_repo.fake/workers/worker')
 
         # This revision on the `main` branch does not have any WorkerVersions.
-        # Improper handling of NULL values in the queries looking for expired workflows and
+        # Improper handling of NULL values in the queries looking for expired processes and
         # excluding revisions that should not be deleted based on the GitRefs could lead to
-        # this revision causing no expired workflows to ever be found.
+        # this revision causing no expired processes to ever be found.
         empty_revision = repo.revisions.create(
             hash=str(uuid.uuid4()),
             message='A revision with no worker versions',
@@ -637,17 +644,16 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 2 artifacts of expired workflows from S3…
+                Removing 2 artifacts of expired processes from S3…
                 Removing artifact s3_key
                 Removing artifact s3_key
-                Removing logs for 2 tasks of expired workflows from S3…
+                Removing logs for 2 tasks of expired processes from S3…
                 Removing task log s3_key
                 Removing task log s3_key
                 Updating 2 available worker versions to the Error state…
-                Removing 2 artifacts of expired workflows…
-                Removing 2 tasks of expired workflows…
-                Removing 2 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 2 artifacts of expired processes…
+                Removing 2 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -673,16 +679,14 @@ class TestCleanupCommand(FixtureTestCase):
             lonely_artifact.refresh_from_db()
         with self.assertRaises(Task.DoesNotExist):
             lonely_artifact.task.refresh_from_db()
-        with self.assertRaises(Workflow.DoesNotExist):
-            lonely_artifact.task.workflow.refresh_from_db()
         with self.assertRaises(Artifact.DoesNotExist):
             branch_artifact.refresh_from_db()
         with self.assertRaises(Task.DoesNotExist):
             branch_artifact.task.refresh_from_db()
-        with self.assertRaises(Workflow.DoesNotExist):
-            branch_artifact.task.workflow.refresh_from_db()
 
         # Those still exist, refreshing works
+        lonely_artifact.task.process.refresh_from_db()
+        branch_artifact.task.process.refresh_from_db()
         lonely_revision.refresh_from_db()
         branch_revision.refresh_from_db()
         empty_revision.refresh_from_db()
@@ -720,13 +724,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -774,20 +777,19 @@ class TestCleanupCommand(FixtureTestCase):
             image_no_element_old_2 = Image.objects.create(path='path/pathpathpath/img', width=12, height=12, server=img_server)
         image_no_element_new = Image.objects.create(path='path/pathpath/img', width=12, height=12, server=img_server)
 
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             self.assertEqual(
                 self.cleanup(),
                 dedent(
                     """
                     Removing orphaned Ponos artifacts…
                     Successfully cleaned up orphaned Ponos artifacts.
-                    Removing 0 artifacts of expired workflows from S3…
-                    Removing logs for 0 tasks of expired workflows from S3…
+                    Removing 0 artifacts of expired processes from S3…
+                    Removing logs for 0 tasks of expired processes from S3…
                     Updating 0 available worker versions to the Error state…
-                    Removing 0 artifacts of expired workflows…
-                    Removing 0 tasks of expired workflows…
-                    Removing 0 expired workflows…
-                    Successfully cleaned up expired workflows.
+                    Removing 0 artifacts of expired processes…
+                    Removing 0 tasks of expired processes…
+                    Successfully cleaned up expired processes.
                     Removing 0 old corpus exports from S3…
                     Removing 0 old corpus exports…
                     Successfully cleaned up old corpus exports.
@@ -819,17 +821,22 @@ class TestCleanupCommand(FixtureTestCase):
 
     @patch('arkindex.documents.management.commands.cleanup.s3')
     def test_cleanup_logs(self, cleanup_s3_mock, s3_mock, rq_mock):
-        workflow = Workflow.objects.create(farm=Farm.objects.first())
-        task = workflow.tasks.create(run=0, depth=0, slug='task')
+        process = Process.objects.create(
+            farm=Farm.objects.first(),
+            mode=ProcessMode.Workers,
+            corpus=self.corpus,
+            creator=self.superuser,
+        )
+        task = process.tasks.create(run=0, depth=0, slug='task')
 
         good_s3_log = MagicMock()
-        good_s3_log.key = f'{workflow.id}/run_{task.run}/{task.id}.log'
+        good_s3_log.key = f'{task.id}.log'
         orphan_s3_log = MagicMock()
-        orphan_s3_log.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_0/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log'
+        orphan_s3_log.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa.log'
         unsupported_s3_log = MagicMock()
         unsupported_s3_log.key = 'cant_touch_this.txt.vbs'
         broken_s3_log = MagicMock()
-        broken_s3_log.key = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_1/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log'
+        broken_s3_log.key = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log'
         broken_s3_log.delete.side_effect = ClientError({'Error': {'Code': '500'}}, 'delete_object')
 
         cleanup_s3_mock.Bucket.return_value.objects.all.side_effect = [
@@ -851,13 +858,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -870,9 +876,9 @@ class TestCleanupCommand(FixtureTestCase):
                 Removing orphaned local images…
                 Successfully cleaned up orphaned local images.
                 Removing orphaned Ponos logs…
-                Removing log aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_0/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log…
+                Removing log aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa.log…
                 Unsupported log cant_touch_this.txt.vbs
-                Removing log aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/run_1/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log…
+                Removing log bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb.log…
                 An error occurred (500) when calling the delete_object operation: Unknown
                 Successfully cleaned up orphaned Ponos logs.
                 Removing orphaned model versions archives…
@@ -931,13 +937,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -1010,13 +1015,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
@@ -1088,13 +1092,12 @@ class TestCleanupCommand(FixtureTestCase):
                 """
                 Removing orphaned Ponos artifacts…
                 Successfully cleaned up orphaned Ponos artifacts.
-                Removing 0 artifacts of expired workflows from S3…
-                Removing logs for 0 tasks of expired workflows from S3…
+                Removing 0 artifacts of expired processes from S3…
+                Removing logs for 0 tasks of expired processes from S3…
                 Updating 0 available worker versions to the Error state…
-                Removing 0 artifacts of expired workflows…
-                Removing 0 tasks of expired workflows…
-                Removing 0 expired workflows…
-                Successfully cleaned up expired workflows.
+                Removing 0 artifacts of expired processes…
+                Removing 0 tasks of expired processes…
+                Successfully cleaned up expired processes.
                 Removing 0 old corpus exports from S3…
                 Removing 0 old corpus exports…
                 Successfully cleaned up old corpus exports.
diff --git a/arkindex/documents/tests/test_bulk_classification.py b/arkindex/documents/tests/test_bulk_classification.py
index 65a39a16c9..c072997c09 100644
--- a/arkindex/documents/tests/test_bulk_classification.py
+++ b/arkindex/documents/tests/test_bulk_classification.py
@@ -296,7 +296,7 @@ class TestBulkClassification(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
         with self.assertNumQueries(5):
             response = self.client.post(
                 reverse('api:classification-bulk'),
@@ -394,7 +394,7 @@ class TestBulkClassification(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -441,7 +441,7 @@ class TestBulkClassification(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         self.assertNotEqual(self.worker_run.process_id, local_worker_run.process_id)
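
The same one-line change repeats throughout the test suites below; the new access pattern, using names taken from these tests:

    # Tasks now hang directly off the process; the intermediate workflow is gone.
    self.worker_run.process.start()
    task = self.worker_run.process.tasks.first()  # was process.workflow.tasks.first()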
 
diff --git a/arkindex/documents/tests/test_bulk_element_transcriptions.py b/arkindex/documents/tests/test_bulk_element_transcriptions.py
index d6a2a15853..89a85f8c94 100644
--- a/arkindex/documents/tests/test_bulk_element_transcriptions.py
+++ b/arkindex/documents/tests/test_bulk_element_transcriptions.py
@@ -796,7 +796,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
@@ -829,7 +829,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(14):
             response = self.client.post(
@@ -872,7 +872,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(14):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_bulk_elements.py b/arkindex/documents/tests/test_bulk_elements.py
index e2d3a5f0f6..c76e407283 100644
--- a/arkindex/documents/tests/test_bulk_elements.py
+++ b/arkindex/documents/tests/test_bulk_elements.py
@@ -444,7 +444,7 @@ class TestBulkElements(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
         payload = {**self.payload, 'worker_run_id': str(other_worker_run.id)}
 
         with self.assertNumQueries(5):
@@ -510,7 +510,7 @@ class TestBulkElements(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
         payload = {**self.payload, 'worker_run_id': str(self.worker_run.id)}
 
         with self.assertNumQueries(12):
@@ -559,7 +559,7 @@ class TestBulkElements(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         payload = {**self.payload, 'worker_run_id': str(local_worker_run.id)}
 
diff --git a/arkindex/documents/tests/test_bulk_transcription_entities.py b/arkindex/documents/tests/test_bulk_transcription_entities.py
index 4a94951023..0168cf737f 100644
--- a/arkindex/documents/tests/test_bulk_transcription_entities.py
+++ b/arkindex/documents/tests/test_bulk_transcription_entities.py
@@ -240,7 +240,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
 
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
         with self.assertNumQueries(6):
             response = self.client.post(
                 reverse('api:transcription-entities-bulk', kwargs={'pk': str(self.transcription.id)}),
@@ -358,7 +358,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(12):
             response = self.client.post(
@@ -396,7 +396,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(12):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_bulk_transcriptions.py b/arkindex/documents/tests/test_bulk_transcriptions.py
index faa8f13833..14bf6ea6bc 100644
--- a/arkindex/documents/tests/test_bulk_transcriptions.py
+++ b/arkindex/documents/tests/test_bulk_transcriptions.py
@@ -287,7 +287,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(2):
             response = self.client.post(
@@ -408,7 +408,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         element = self.corpus.elements.get(name='Volume 2, page 1r')
         self.assertFalse(element.transcriptions.exists())
@@ -463,7 +463,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
         element = self.corpus.elements.get(name='Volume 2, page 1r')
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_classification.py b/arkindex/documents/tests/test_classification.py
index 75fc7cef97..c07b0ba823 100644
--- a/arkindex/documents/tests/test_classification.py
+++ b/arkindex/documents/tests/test_classification.py
@@ -261,7 +261,7 @@ class TestClassifications(FixtureAPITestCase):
         local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
         local_worker_run = local_process.worker_runs.get()
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -302,7 +302,7 @@ class TestClassifications(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -389,7 +389,7 @@ class TestClassifications(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_create_elements.py b/arkindex/documents/tests/test_create_elements.py
index 7b6ae2a6f3..683cdcae13 100644
--- a/arkindex/documents/tests/test_create_elements.py
+++ b/arkindex/documents/tests/test_create_elements.py
@@ -579,7 +579,7 @@ class TestCreateElements(FixtureAPITestCase):
 
     def test_worker_run_task(self):
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(10):
             response = self.client.post(
@@ -614,7 +614,7 @@ class TestCreateElements(FixtureAPITestCase):
         local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
         local_worker_run = local_process.worker_runs.get()
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(10):
             response = self.client.post(
@@ -696,7 +696,7 @@ class TestCreateElements(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_create_transcriptions.py b/arkindex/documents/tests/test_create_transcriptions.py
index 5805975cc6..83184bc6f9 100644
--- a/arkindex/documents/tests/test_create_transcriptions.py
+++ b/arkindex/documents/tests/test_create_transcriptions.py
@@ -224,7 +224,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
         local_worker_run = local_process.worker_runs.get()
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -260,7 +260,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -346,7 +346,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(7):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_entities_api.py b/arkindex/documents/tests/test_entities_api.py
index 821a66f72b..67bfe0bd0c 100644
--- a/arkindex/documents/tests/test_entities_api.py
+++ b/arkindex/documents/tests/test_entities_api.py
@@ -499,7 +499,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_version_1, parents=[])
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
@@ -585,7 +585,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(9):
             response = self.client.post(
@@ -620,7 +620,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(9):
             response = self.client.post(
@@ -1048,7 +1048,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_version_1, parents=[])
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(6):
             response = self.client.post(
@@ -1075,7 +1075,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
@@ -1110,7 +1110,7 @@ class TestEntitiesAPI(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run_1.process.start()
-        task = self.worker_run_1.process.workflow.tasks.first()
+        task = self.worker_run_1.process.tasks.first()
 
         with self.assertNumQueries(8):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_metadata.py b/arkindex/documents/tests/test_metadata.py
index 4bdc7903f1..1da1c40b7c 100644
--- a/arkindex/documents/tests/test_metadata.py
+++ b/arkindex/documents/tests/test_metadata.py
@@ -39,7 +39,7 @@ class TestMetaData(FixtureAPITestCase):
         cls.process = cls.corpus.processes.create(mode=ProcessMode.Workers, creator=cls.user)
         cls.process.worker_runs.create(version=cls.worker_version, parents=[])
         cls.process.start()
-        cls.task = cls.process.workflow.tasks.first()
+        cls.task = cls.process.tasks.first()
 
     def setUp(self):
         super().setUp()
@@ -497,7 +497,7 @@ class TestMetaData(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
         with self.assertNumQueries(5):
             response = self.client.post(
                 reverse('api:element-metadata', kwargs={'pk': str(self.vol.id)}),
@@ -546,7 +546,7 @@ class TestMetaData(FixtureAPITestCase):
         when authenticated as a Ponos task of this process
         """
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(12):
             response = self.client.post(
@@ -570,7 +570,7 @@ class TestMetaData(FixtureAPITestCase):
         local_worker_run = local_process.worker_runs.get()
 
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(12):
             response = self.client.post(
@@ -1495,7 +1495,7 @@ class TestMetaData(FixtureAPITestCase):
         )
         other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
         self.worker_run.process.start()
-        task = self.worker_run.process.workflow.tasks.first()
+        task = self.worker_run.process.tasks.first()
 
         with self.assertNumQueries(5):
             response = self.client.post(
diff --git a/arkindex/documents/tests/test_retrieve_elements.py b/arkindex/documents/tests/test_retrieve_elements.py
index d3522b6a64..1433be4a4a 100644
--- a/arkindex/documents/tests/test_retrieve_elements.py
+++ b/arkindex/documents/tests/test_retrieve_elements.py
@@ -120,7 +120,7 @@ class TestRetrieveElements(FixtureAPITestCase):
             creator=self.user,
         )
         process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
 
         self.assertTrue(self.vol.type.folder)
         response = self.client.get(
diff --git a/arkindex/images/tests/test_image_api.py b/arkindex/images/tests/test_image_api.py
index 0f543cf726..a7fd7521e5 100644
--- a/arkindex/images/tests/test_image_api.py
+++ b/arkindex/images/tests/test_image_api.py
@@ -612,7 +612,7 @@ class TestImageApi(FixtureAPITestCase):
             creator=self.user,
         )
         process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         # The user scope should not be necessary with Ponos task authentication
         self.assertFalse(self.user.user_scopes.filter(scope=Scope.CreateIIIFImage).exists())
 
@@ -658,7 +658,7 @@ class TestImageApi(FixtureAPITestCase):
             creator=self.user,
         )
         process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         # The user scope should not be necessary with Ponos task authentication
         self.assertFalse(self.user.user_scopes.filter(scope=Scope.CreateIIIFImage).exists())
 
diff --git a/arkindex/ponos/admin.py b/arkindex/ponos/admin.py
index d6b6d3a236..1f4cfcda90 100644
--- a/arkindex/ponos/admin.py
+++ b/arkindex/ponos/admin.py
@@ -1,11 +1,10 @@
 from django import forms
 from django.contrib import admin, messages
 from django.core.exceptions import ValidationError
-from django.db.models import Max, TextField
 from enumfields.admin import EnumFieldListFilter
 
 from arkindex.ponos.keys import gen_nonce
-from arkindex.ponos.models import FINAL_STATES, GPU, Agent, Artifact, Farm, Secret, Task, Workflow, encrypt
+from arkindex.ponos.models import FINAL_STATES, GPU, Agent, Artifact, Farm, Secret, Task, encrypt
 
 
 class ArtifactInline(admin.TabularInline):
@@ -21,7 +20,7 @@ class TaskAdmin(admin.ModelAdmin):
         "slug",
         "run",
         "state",
-        "workflow_id",
+        "process_id",
         "updated",
         "agent",
         "tags",
@@ -50,7 +49,7 @@ class TaskAdmin(admin.ModelAdmin):
                     "run",
                     "depth",
                     "state",
-                    "workflow",
+                    "process",
                     "agent",
                     "tags",
                     "priority",
@@ -83,43 +82,6 @@ class TaskInline(admin.TabularInline):
     extra = 0
 
 
-def workflow_retry(modeladmin, request, queryset):
-    """
-    Retry selected workflows
-    """
-    for w in queryset.all():
-        w.retry()
-
-
-class WorkflowAdmin(admin.ModelAdmin):
-    list_display = ("id", "updated", "state")
-    actions = (workflow_retry,)
-    readonly_fields = (
-        "id",
-        "state",
-    )
-    inlines = [
-        TaskInline,
-    ]
-
-    # Use a monospace font for the workflow recipe
-    formfield_overrides = {
-        TextField: {
-            "widget": admin.widgets.AdminTextareaWidget(
-                attrs={"style": "font-family: monospace"}
-            )
-        },
-    }
-
-    def get_queryset(self, *args, **kwargs):
-        return (
-            super()
-            .get_queryset(*args, **kwargs)
-            .prefetch_related("tasks")
-            .annotate(last_run=Max("tasks__run"))
-        )
-
-
 class GPUInline(admin.TabularInline):
     model = GPU
     fields = ("id", "name", "index", "ram_total")
@@ -266,8 +228,6 @@ class SecretAdmin(admin.ModelAdmin):
 
 
 admin.site.register(Task, TaskAdmin)
-admin.site.register(Workflow, WorkflowAdmin)
 admin.site.register(Agent, AgentAdmin)
 admin.site.register(Farm, FarmAdmin)
 admin.site.register(Secret, SecretAdmin)
-workflow_retry.short_description = "Retry all selected workflows (a new run is created)"
diff --git a/arkindex/ponos/api.py b/arkindex/ponos/api.py
index b829cdbec0..7b96670b70 100644
--- a/arkindex/ponos/api.py
+++ b/arkindex/ponos/api.py
@@ -3,11 +3,11 @@ import uuid
 from collections import defaultdict
 from textwrap import dedent
 
-from django.db.models import Count, Max, Q
+from django.db.models import Count, Q
 from django.shortcuts import get_object_or_404, redirect
 from django.utils import timezone
 from drf_spectacular.utils import OpenApiExample, extend_schema, extend_schema_view
-from rest_framework.exceptions import NotFound, PermissionDenied, ValidationError
+from rest_framework.exceptions import ValidationError
 from rest_framework.generics import (
     CreateAPIView,
     ListAPIView,
@@ -16,13 +16,12 @@ from rest_framework.generics import (
     RetrieveUpdateAPIView,
     UpdateAPIView,
 )
-from rest_framework.permissions import SAFE_METHODS
 from rest_framework.response import Response
 from rest_framework.views import APIView
 
 from arkindex.ponos.authentication import AgentAuthentication, TaskAuthentication
 from arkindex.ponos.keys import load_private_key
-from arkindex.ponos.models import Agent, Artifact, Farm, Secret, State, Task, Workflow
+from arkindex.ponos.models import Agent, Artifact, Farm, Secret, State, Task
 from arkindex.ponos.permissions import (
     IsAgent,
     IsAgentOrArtifactAdmin,
@@ -45,12 +44,8 @@ from arkindex.ponos.serializers import (
     TaskDefinitionSerializer,
     TaskSerializer,
     TaskTinySerializer,
-    WorkflowSerializer,
 )
-from arkindex.process.models import Process
-from arkindex.project.mixins import ProcessACLMixin
 from arkindex.project.permissions import IsVerified
-from arkindex.users.models import Role
 from rest_framework_simplejwt.views import TokenRefreshView
 
 
@@ -87,60 +82,6 @@ class PublicKeyEndpoint(APIView):
         return Response(load_private_key().public_key())
 
 
-@extend_schema(tags=["ponos"])
-@extend_schema_view(
-    get=extend_schema(description=dedent("""
-        Retrieve a Ponos workflow status.
-
-        Requires **guest** access to its process.
-    """).strip()),
-    put=extend_schema(description=dedent("""
-        Update a workflow's status and tasks.
-
-        Requires **admin** access to its process.
-    """).strip()),
-    patch=extend_schema(description=dedent("""
-        Partially update a workflow's status and tasks.
-
-        Requires **admin** access to its process.
-    """).strip()),
-)
-class WorkflowDetails(ProcessACLMixin, RetrieveUpdateAPIView):
-    """
-    Retrieve information about a workflow, or update its state.
-    Updating a workflow's state to :attr:`~arkindex.ponos.models.State.Stopping` will cause it to stop.
-    """
-
-    permission_classes = (IsVerified, )
-    queryset = Workflow.objects.prefetch_related("tasks__parents").annotate(
-        last_run=Max("tasks__run")
-    )
-    serializer_class = WorkflowSerializer
-
-    def perform_update(self, serializer):
-        serializer.instance.stop()
-
-    def check_object_permissions(self, request, workflow):
-        super().check_object_permissions(request, workflow)
-        if self.request.user.is_admin:
-            return
-
-        required_access = Role.Admin.value
-        # Allow a user with a read access on the process to retrieve its workflow
-        if request.method in SAFE_METHODS:
-            required_access = Role.Guest.value
-
-        process = get_object_or_404(
-            Process,
-            workflow_id=workflow.id
-        )
-        access_level = self.process_access_level(process)
-        if not access_level:
-            raise NotFound
-        if access_level < required_access:
-            raise PermissionDenied(detail='You do not have an admin access to this process.')
-
-
 @extend_schema(tags=["ponos"])
 @extend_schema_view(
     get=extend_schema(
@@ -291,7 +232,7 @@ class TaskDefinition(RetrieveAPIView):
 
     # We need to specify the default database to avoid stale reads
     # when a task is updated by an agent and the agent then immediately fetches its definition
-    queryset = Task.objects.using("default").select_related("workflow")
+    queryset = Task.objects.using("default").select_related("process")
     permission_classes = (IsAgent,)
     serializer_class = TaskDefinitionSerializer
 
@@ -322,7 +263,7 @@ class TaskArtifacts(ListCreateAPIView):
     def get_task(self):
         task = get_object_or_404(
             # Select the required tables for permissions checking
-            Task.objects.select_related('workflow__process__corpus', 'workflow__process__revision'),
+            Task.objects.select_related('process__corpus', 'process__revision'),
             pk=self.kwargs["pk"],
         )
         self.check_object_permissions(self.request, task)
diff --git a/arkindex/ponos/authentication.py b/arkindex/ponos/authentication.py
index 5de8559d8c..0836aecba9 100644
--- a/arkindex/ponos/authentication.py
+++ b/arkindex/ponos/authentication.py
@@ -1,4 +1,3 @@
-from django.core.exceptions import ObjectDoesNotExist
 from drf_spectacular.authentication import TokenScheme
 from drf_spectacular.contrib.rest_framework_simplejwt import SimpleJWTScheme
 from rest_framework.authentication import TokenAuthentication
@@ -79,29 +78,21 @@ class TaskAuthentication(TokenAuthentication):
 
     def authenticate_credentials(self, key):
         try:
-            task = Task.objects.select_related('workflow__process__creator').get(token=key)
+            task = Task.objects.select_related('process__creator', 'process__corpus').get(token=key)
         except Task.DoesNotExist:
             # Same error message as the standard TokenAuthentication
             raise AuthenticationFailed('Invalid token.')
 
-        # There is no Workflow.process_id, since the FK is on Process.workflow_id,
-        # and accessing Workflow.process when there is no process causes an exception
-        # instead of returning None.
-        try:
-            process = task.workflow.process
-        except ObjectDoesNotExist:
-            raise AuthenticationFailed('Task has no process.')
-
         # Add the process ID to Sentry tags, to speed up troubleshooting
         # by identifying if and which process is affected by the error.
-        set_tag("process.id", str(process.id))
+        set_tag("process_id", str(task.process_id))
 
-        if not process.creator_id or not process.creator.is_active:
+        if not task.process.creator_id or not task.process.creator.is_active:
             # Same error message as the standard TokenAuthentication
             raise AuthenticationFailed('User inactive or deleted.')
 
         # Must return a 2-tuple that will be set as (self.request.user, self.request.auth)
-        return (process.creator, task)
+        return (task.process.creator, task)
 
 
 class TaskAuthenticationExtension(TokenScheme):
diff --git a/arkindex/ponos/migrations/0001_initial.py b/arkindex/ponos/migrations/0001_initial.py
index 878369265d..4e9ea3b989 100644
--- a/arkindex/ponos/migrations/0001_initial.py
+++ b/arkindex/ponos/migrations/0001_initial.py
@@ -11,8 +11,8 @@ from django.db import migrations, models
 
 import arkindex.ponos.keys
 import arkindex.ponos.models
-import arkindex.ponos.validators
 import arkindex.project.fields
+import arkindex.project.validators
 
 
 class Migration(migrations.Migration):
@@ -45,7 +45,7 @@ class Migration(migrations.Migration):
             fields=[
                 ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                 ('path', models.CharField(max_length=500)),
-                ('size', models.BigIntegerField(validators=[django.core.validators.MinValueValidator(1), arkindex.ponos.validators.MaxValueValidator(arkindex.ponos.models.artifact_max_size)])),
+                ('size', models.BigIntegerField(validators=[django.core.validators.MinValueValidator(1), arkindex.project.validators.MaxValueValidator(arkindex.ponos.models.artifact_max_size)])),
                 ('content_type', models.CharField(default='application/octet-stream', max_length=250)),
                 ('created', models.DateTimeField(auto_now_add=True)),
                 ('updated', models.DateTimeField(auto_now=True)),
diff --git a/arkindex/ponos/migrations/0002_task_process.py b/arkindex/ponos/migrations/0002_task_process.py
new file mode 100644
index 0000000000..49b82f7899
--- /dev/null
+++ b/arkindex/ponos/migrations/0002_task_process.py
@@ -0,0 +1,57 @@
+# Generated by Django 4.1.7 on 2023-06-01 09:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('process', '0004_process_farm_started_finished'),
+        ('ponos', '0001_initial'),
+    ]
+
+    operations = [
+        # Make the workflow ID nullable on a task.
+        # This is necessary when reverting: Django would otherwise recreate the Task.workflow field as non-nullable,
+        # causing errors on all existing tasks before we can recreate workflows in the data migration.
+        migrations.AlterField(
+            model_name='task',
+            name='workflow',
+            field=models.ForeignKey(
+                on_delete=models.CASCADE,
+                related_name='tasks',
+                to='ponos.Workflow',
+                blank=True,
+                null=True,
+            )
+        ),
+        # Add Task.process as a nullable field, which we make non-nullable only after the data migration.
+        migrations.AddField(
+            model_name='task',
+            name='process',
+            field=models.ForeignKey(
+                to='process.Process',
+                on_delete=models.CASCADE,
+                related_name='tasks',
+                blank=True,
+                null=True,
+            ),
+        ),
+        migrations.AlterModelOptions(
+            name='task',
+            options={'ordering': ('process_id', 'run', 'depth', 'slug')},
+        ),
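+        # Drop the old ("workflow", "run", "slug") unique_together before adding the process-based constraint.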
+        migrations.AlterUniqueTogether(
+            name='task',
+            unique_together=set(),
+        ),
+        migrations.AddConstraint(
+            model_name='task',
+            constraint=models.UniqueConstraint(
+                models.F('process_id'),
+                models.F('run'),
+                models.F('slug'),
+                name='unique_process_run_slug',
+            ),
+        ),
+    ]
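
The comments above refer to a data migration in the process app that moves workflow data onto processes before the workflow field is dropped. A minimal sketch of what such a forward step could look like (the model access and field mapping here are assumptions, not the actual migration):

    def migrate_workflows(apps, schema_editor):
        # Hypothetical sketch only; the real logic lives in the
        # process app's data migration.
        Process = apps.get_model('process', 'Process')
        Task = apps.get_model('ponos', 'Task')
        for process in Process.objects.exclude(workflow=None).select_related('workflow'):
            # Copy the workflow-level attributes onto the process…
            process.farm_id = process.workflow.farm_id
            process.finished = process.workflow.finished
            process.save()
            # …then re-parent the workflow's tasks.
            Task.objects.filter(workflow_id=process.workflow_id).update(process_id=process.id)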
diff --git a/arkindex/ponos/migrations/0003_remove_workflow.py b/arkindex/ponos/migrations/0003_remove_workflow.py
new file mode 100644
index 0000000000..cbf88e0979
--- /dev/null
+++ b/arkindex/ponos/migrations/0003_remove_workflow.py
@@ -0,0 +1,31 @@
+# Generated by Django 4.1.7 on 2023-06-01 10:09
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('process', '0006_remove_process_workflow'),
+        ('ponos', '0002_task_process'),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name='task',
+            name='workflow',
+        ),
+        # Make Task.process non-nullable
+        migrations.AlterField(
+            model_name='task',
+            name='process',
+            field=models.ForeignKey(
+                on_delete=models.CASCADE,
+                related_name='tasks',
+                to='process.process',
+            ),
+        ),
+        migrations.DeleteModel(
+            name='Workflow',
+        ),
+    ]
diff --git a/arkindex/ponos/models.py b/arkindex/ponos/models.py
index 6aa795fe4f..4e51bd87bf 100644
--- a/arkindex/ponos/models.py
+++ b/arkindex/ponos/models.py
@@ -23,8 +23,8 @@ from enumfields import Enum, EnumField
 from arkindex.ponos.aws import object_url, s3
 from arkindex.ponos.keys import gen_nonce
 from arkindex.ponos.managers import TaskManager
-from arkindex.ponos.validators import MaxValueValidator
 from arkindex.project.fields import ArrayField
+from arkindex.project.validators import MaxValueValidator
 from rest_framework_simplejwt.tokens import RefreshToken
 
 # Maximum allowed time until an agent is considered inactive since last request
@@ -169,7 +169,7 @@ class Agent(models.Model):
 
         # Filter pending task on the agent farm ordered by higher priority first and then seniority (older first)
         pending_tasks = Task.objects.filter(
-            Q(workflow__farm_id=self.farm_id)
+            Q(process__farm_id=self.farm_id)
             & Q(
                 Q(state=State.Pending, agent=None) | Q(state=State.Unscheduled, depth=0)
             )
@@ -336,10 +336,10 @@ ACTIVE_STATES = (
 )
 
 # Tasks priority to determine the overall state of multiple tasks
-# If there are failed tasks, the workflow is failed.
-# Else, if there are any errors, the workflow errored.
+# If there are failed tasks, the process is failed.
+# Else, if there are any errors, the process errored.
 # If there are any running tasks, no matter in which state another task may be,
-# the workflow is set as running to prevent retrying and allow stopping.
+# the process is set as running to prevent retrying and allow stopping.
 STATES_ORDERING = [
     State.Running,
     State.Failed,
@@ -352,216 +352,6 @@ STATES_ORDERING = [
 ]
 
 
-class Workflow(models.Model):
-    """
-    A group of tasks that can depend on each other.
-    """
-
-    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
-    created = models.DateTimeField(auto_now_add=True)
-    updated = models.DateTimeField(auto_now=True)
-    finished = models.DateTimeField(blank=True, null=True)
-    farm = models.ForeignKey(
-        to="ponos.Farm", related_name="workflows", on_delete=models.PROTECT
-    )
-
-    class Meta:
-        ordering = ("-updated",)
-        constraints = [
-            # A workflow cannot be finished before it is created
-            models.CheckConstraint(
-                check=models.Q(finished=None)
-                | models.Q(finished__gte=models.F("created")),
-                name="ponos_workflow_finished_after_created",
-            )
-        ]
-
-    def __str__(self) -> str:
-        return str(self.id)
-
-    def get_absolute_url(self) -> str:
-        """
-        :returns: URL to the workflow details API for this workflow.
-        """
-        return reverse("api:workflow-details", args=[self.id])
-
-    @property
-    def state(self) -> State:
-        """
-        Deduce the workflow's state from the tasks of its latest run.
-        A workflow's state is deduced by picking the first state that any tasks in this run have, in this order:
-
-        #. :attr:`~State.Running`
-        #. :attr:`~State.Failed`
-        #. :attr:`~State.Error`
-        #. :attr:`~State.Stopping`
-        #. :attr:`~State.Stopped`
-        #. :attr:`~State.Pending`
-        #. :attr:`~State.Unscheduled`
-        """
-        return self.get_state(self.get_last_run())
-
-    def get_state(self, run):
-        """
-        A separate method to get a workflow's state on a given run.
-
-        Most users will only use the :meth:`Workflow.state` property to get the state for a workflow's last run.
-        However, when trying to get a state for many workflows at once, using ``.annotate(last_run=Max('tasks__run'))``
-        and using the annotation with this method will prevent many useless SQL requests.
-
-        Further performance improvements can be achieved with ``prefetch_related('tasks')``.
-        """
-        # Negative run numbers never have tasks
-        if run < 0:
-            return State.Unscheduled
-
-        # This prevents performing another SQL request when tasks have already been prefetched.
-        # See https://stackoverflow.com/a/19651840/5990435
-        if (
-            hasattr(self, "_prefetched_objects_cache")
-            and self.tasks.field.remote_field.get_cache_name()
-            in self._prefetched_objects_cache
-        ):
-            task_states = set(t.state for t in self.tasks.all() if t.run == run)
-        else:
-            task_states = set(
-                self.tasks.filter(run=run).values_list("state", flat=True)
-            )
-
-        # This run has no tasks
-        if not task_states:
-            return State.Unscheduled
-
-        # All tasks have the same state
-        if len(task_states) == 1:
-            return task_states.pop()
-
-        for state in STATES_ORDERING:
-            if state in task_states:
-                return state
-
-        raise NotImplementedError("Something went wrong")
-
-    def get_last_run(self) -> int:
-        """
-        Get the last run number. If the ``last_run`` attribute is defined on this workflow,
-        possibly from a ``.annotate(last_run=Max('tasks__run'))`` annotation in a Django QuerySet,
-        this method will return the attribute's value instead of making another SQL request.
-        """
-        if hasattr(self, "last_run"):
-            if self.last_run is None:
-                return -1
-            return self.last_run
-
-        if not self.tasks.exists():
-            return -1
-
-        self.last_run = self.tasks.all().aggregate(models.Max("run"))["run__max"]
-        return self.last_run
-
-    def is_final(self) -> bool:
-        """
-        Helper to tell whether a workflow is final.
-        A workflow is considered final when it is in one of the final states:
-        :attr:`~State.Completed`, :attr:`~State.Failed`, :attr:`~State.Error`, :attr:`~State.Stopped`
-
-        :returns: Whether or not the workflow is considered final.
-        """
-        return self.state in FINAL_STATES
-
-    @property
-    def expiry(self):
-        """
-        A workflow's expiry date. This is the latest expiry date of its tasks.
-        No action is taken when a workflow is expired.
-
-        :returns: The latest expiry date of the workflow's tasks, or None if there are no tasks.
-        :rtype: datetime or None
-        """
-        # This prevents performing another SQL request when tasks have already been prefetched.
-        # See https://stackoverflow.com/a/19651840/5990435
-        if (
-            hasattr(self, "_prefetched_objects_cache")
-            and self.tasks.field.remote_field.get_cache_name()
-            in self._prefetched_objects_cache
-        ):
-            return max(t.expiry for t in self.tasks.all())
-        else:
-            return self.tasks.aggregate(models.Max("expiry"))["expiry__max"]
-
-    def retry(self):
-        """
-        Create new :class:`Task` instances with a new run number and resets the completion date.
-
-        :raises AssertionError: If the workflow is not in a final state.
-        :returns: A dict mapping task slugs to task instances.
-        :rtype: dict
-        """
-        last_run = self.get_last_run()
-        assert self.is_final()
-
-        new_tasks = {}
-        task_parents = {}
-        for task in self.tasks.all():
-            new_task = Task(
-                run=last_run + 1,
-                depth=task.depth,
-                slug=task.slug,
-                priority=task.priority,
-                tags=task.tags,
-                image=task.image,
-                shm_size=task.shm_size,
-                command=task.command,
-                env=task.env,
-                has_docker_socket=task.has_docker_socket,
-                image_artifact=task.image_artifact,
-                agent_id=task.agent_id,
-                requires_gpu=task.requires_gpu,
-                gpu_id=task.gpu_id,
-                workflow_id=task.workflow_id,
-                container=task.container,
-                extra_files=task.extra_files
-            )
-
-            # Set the task token on the new task
-            new_task.env['ARKINDEX_TASK_TOKEN'] = new_task.token
-
-            task_parents[new_task.slug] = list(task.parents.values_list('slug', flat=True))
-            new_tasks[task.slug] = new_task
-
-        Task.objects.bulk_create(new_tasks.values())
-
-        # Add parents once all the tasks are created
-        for task in new_tasks.values():
-            if task_parents[task.slug]:
-                parents = [new_tasks[slug] for slug in task_parents[task.slug]]
-                task.parents.set(parents)
-
-        # setting last_run so that subsequent calls to get_last_run do not require db queries
-        self.last_run = last_run + 1
-        self.finished = None
-        self.save()
-        return new_tasks
-
-    def stop(self) -> None:
-        """
-        Fully stop the workflow by updating every running task to the :attr:`~State.Stopping` state,
-        and every unscheduled task to the :attr:`~State.Stopped` state.
-        """
-        assert not self.is_final()
-        stopping_count = Task.objects.filter(
-            workflow=self, state__in=[State.Pending, State.Running]
-        ).update(state=State.Stopping)
-        Task.objects.filter(workflow=self, state=State.Unscheduled).update(
-            state=State.Stopped
-        )
-        # If all the tasks are immediately stopped, then UpdateTask will not be able to update
-        # the finished attribute, so we do it here.
-        if not stopping_count:
-            self.finished = timezone.now()
-            self.save()
-
-
 def expiry_default():
     """
     Default value for Task.expiry.
@@ -581,9 +371,6 @@ def task_token_default():
 
 
 class Task(models.Model):
-    """
-    A task created from a workflow's recipe.
-    """
 
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
     run = models.PositiveIntegerField()
@@ -621,8 +408,8 @@ class Task(models.Model):
         null=True,
         on_delete=models.SET_NULL,
     )
-    workflow = models.ForeignKey(
-        Workflow,
+    process = models.ForeignKey(
+        'process.Process',
         related_name="tasks",
         on_delete=models.CASCADE,
     )
@@ -655,9 +442,14 @@ class Task(models.Model):
     objects = TaskManager()
 
     class Meta:
-        unique_together = (("workflow", "run", "slug"),)
-        ordering = ("workflow", "run", "depth", "slug")
+        ordering = ("process_id", "run", "depth", "slug")
         constraints = [
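+            # Replaces the old ("workflow", "run", "slug") unique_together.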
+            models.UniqueConstraint(
+                'process_id',
+                'run',
+                'slug',
+                name='unique_process_run_slug',
+            ),
             models.UniqueConstraint(
                 'gpu',
                 condition=Q(state__in=ACTIVE_STATES),
@@ -670,7 +462,7 @@ class Task(models.Model):
 
     def get_absolute_url(self) -> str:
         """
-        :returns: URL to the workflow details API for this workflow.
+        :returns: URL to the task details API for this task.
         """
         return reverse("api:task-details", args=[self.id])
 
@@ -680,7 +472,7 @@ class Task(models.Model):
         A task is considered final when it is in one of the final states:
         :attr:`~State.Completed`, :attr:`~State.Failed`, :attr:`~State.Error`, :attr:`~State.Stopped`
 
-        :returns: Whether or not the workflow is considered final.
+        :returns: Whether or not the task is considered final.
         """
         return self.state in FINAL_STATES
 
@@ -692,11 +484,7 @@ class Task(models.Model):
         """
         return s3.Object(
             settings.PONOS_S3_LOGS_BUCKET,
-            os.path.join(
-                str(self.workflow.id),
-                "run_{}".format(self.run),
-                "{!s}.log".format(self.id),
-            ),
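+            # Log objects are now stored flat, keyed by the task ID alone.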
+            f"{self.id}.log",
         )
 
     @cached_property
@@ -805,7 +593,6 @@ class Artifact(models.Model):
         return s3.Object(
             settings.PONOS_S3_ARTIFACTS_BUCKET,
             os.path.join(
-                str(self.task.workflow_id),
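+                # The artifact key no longer includes the workflow ID: "<task_id>/<path>".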
                 str(self.task.id),
                 str(self.path),
             ),
diff --git a/arkindex/ponos/permissions.py b/arkindex/ponos/permissions.py
index 4fdb86dbf1..d95af88541 100644
--- a/arkindex/ponos/permissions.py
+++ b/arkindex/ponos/permissions.py
@@ -58,7 +58,7 @@ class IsAgentOrTaskAdmin(CorpusACLMixin, IsAuthenticated):
     """
     Permission to access a task with high privilege
 
-    Allowed for admins, agents, creators of the process whose workflow contains the task,
+    Allowed for admins, agents, creators of the task's process,
     and users with an admin right on the process' corpus.
     """
 
@@ -69,9 +69,9 @@ class IsAgentOrTaskAdmin(CorpusACLMixin, IsAuthenticated):
         return (
             require_agent_or_admin(request, view)
             or (
-                task.workflow.process is not None
-                and task.workflow.process.corpus_id is not None
-                and self.has_admin_access(task.workflow.process.corpus)
+                task.process is not None
+                and task.process.corpus_id is not None
+                and self.has_admin_access(task.process.corpus)
             )
         )
 
@@ -94,7 +94,7 @@ class IsAgentOrTaskAdminOrReadOnly(ProcessACLMixin, IsAuthenticated):
         # Add request to attributes for the ACL mixin to work with self.user
         self.request = request
         try:
-            level = self.process_access_level(task.workflow.process)
+            level = self.process_access_level(task.process)
         except Process.DoesNotExist:
             # Reject if the task has no process
             return False
diff --git a/arkindex/ponos/serializers.py b/arkindex/ponos/serializers.py
index 50945e8d2b..cde2fdbd7e 100644
--- a/arkindex/ponos/serializers.py
+++ b/arkindex/ponos/serializers.py
@@ -21,7 +21,6 @@ from arkindex.ponos.models import (
     Secret,
     State,
     Task,
-    Workflow,
     task_token_default,
 )
 from arkindex.ponos.serializer_fields import Base64Field, PublicKeyField
@@ -93,7 +92,7 @@ class AgentLightSerializer(serializers.ModelSerializer):
 class TaskLightSerializer(serializers.ModelSerializer):
     """
     Serializes a :class:`~arkindex.ponos.models.Task` instance without logs or agent information.
-    Used to list tasks inside a workflow.
+    Used to list tasks inside a process.
     """
 
     state = EnumField(State)
@@ -187,18 +186,18 @@ class TaskSerializer(TaskLightSerializer):
                     child.state = State.Pending
                     child.save()
 
-            # This task has no children: this might be the last task of the workflow, so the workflow might be finished.
-            # If all tasks in the current run are finished, update the completion date of the workflow.
+            # This task has no children: this might be the last task of the process, so the process might be finished.
+            # If all tasks in the current run are finished, update the completion date of the process.
             if (
                 not children
                 and not Task.objects.filter(
-                    workflow_id=instance.workflow_id, run=instance.run
+                    process_id=instance.process_id, run=instance.run
                 )
                 .exclude(state__in=FINAL_STATES)
                 .exists()
             ):
-                instance.workflow.finished = timezone.now()
-                instance.workflow.save()
+                instance.process.finished = timezone.now()
+                instance.process.save()
 
         # We already checked earlier that the task was in a final state.
         # If this state is both final and not completed, then we should trigger the task failure signal.
@@ -233,6 +232,10 @@ class TaskTinySerializer(TaskSerializer):
                 )
             instance.agent = None
 
+            # Clear the process completion date, since a task is going to run again
+            instance.process.finished = None
+            instance.process.save()
+
         if new_state in FINAL_STATES and new_state != State.Completed:
             task_failure.send_robust(self.__class__, task=instance)
 
@@ -377,46 +380,6 @@ class AgentCreateSerializer(serializers.ModelSerializer):
         return data
 
 
-class WorkflowSerializer(serializers.ModelSerializer):
-    """
-    Serializes a :class:`~arkindex.ponos.models.Workflow` instance with its tasks.
-    """
-
-    tasks = TaskLightSerializer(many=True, read_only=True)
-    farm = FarmSerializer(read_only=True)
-    state = EnumField(State)
-
-    class Meta:
-        model = Workflow
-        fields = (
-            "id",
-            "created",
-            "finished",
-            "state",
-            "farm",
-            "tasks",
-        )
-        read_only_fields = (
-            "id",
-            "created",
-            "finished",
-            "farm",
-            "tasks",
-        )
-
-    def validate_state(self, state: State) -> None:
-        """
-        When performing updates, prevents updating to any state other than :attr:`~arkindex.ponos.models.State.Stopping`
-        and restricts to :attr:`~arkindex.ponos.models.State.Pending` or :attr:`~arkindex.ponos.models.State.Running` workflows.
-        """
-        if state != State.Stopping:
-            raise ValidationError("Can only change the state to 'stopping'")
-        if self.instance.state not in (State.Pending, State.Running):
-            raise ValidationError(
-                "Cannot stop a {} workflow".format(self.instance.state.value)
-            )
-
-
 class ActionSerializer(serializers.Serializer):
     """
     Serializes an :const:`~arkindex.ponos.models.Action` instance.
@@ -447,7 +410,7 @@ class TaskDefinitionSerializer(serializers.ModelSerializer):
     """
 
     env = serializers.DictField(default={})
-    workflow_id = serializers.UUIDField()
+    process_id = serializers.UUIDField()
     agent_id = serializers.PrimaryKeyRelatedField(queryset=Agent.objects.all())
     image_artifact_url = serializers.SerializerMethodField()
     s3_logs_put_url = serializers.SerializerMethodField()
@@ -488,7 +451,7 @@ class TaskDefinitionSerializer(serializers.ModelSerializer):
             "agent_id",
             "s3_logs_put_url",
             "parents",
-            "workflow_id",
+            "process_id",
             "gpu_id",
             "extra_files",
         )
@@ -554,7 +517,7 @@ class NewTaskSerializer(serializers.ModelSerializer):
     Serializes a :class:`~arkindex.ponos.models.Task` instance to permit creation by a parent.
     """
 
-    workflow_id = serializers.UUIDField()
+    process_id = serializers.UUIDField()
     command = serializers.CharField(required=False)
     env = serializers.DictField(
         child=serializers.CharField(), required=False, default={}
@@ -565,7 +528,7 @@ class NewTaskSerializer(serializers.ModelSerializer):
         model = Task
         fields = (
             "id",
-            "workflow_id",
+            "process_id",
             "slug",
             "parents",
             "image",
@@ -584,21 +547,21 @@ class NewTaskSerializer(serializers.ModelSerializer):
     def validate(self, data):
         parents = data["parents"]
 
-        ids = {parent.workflow.id for parent in parents}
-        if len(ids) != 1 or str(ids.pop()) != str(data["workflow_id"]):
+        ids = {parent.process_id for parent in parents}
+        if len(ids) != 1 or str(ids.pop()) != str(data["process_id"]):
             raise ValidationError(
-                "All parents must be in the same workflow as the child task"
+                "All parents must be in the same process as the child task"
             )
 
         runs = {parent.run for parent in parents}
         if len(runs) != 1:
             raise ValidationError(
-                "All parents must have the same run in the given workflow"
+                "All parents must have the same run in the given process"
             )
         data["run"] = runs.pop()
 
         if Task.objects.filter(
-            workflow_id=data["workflow_id"], run=data["run"], slug=data["slug"]
+            process_id=data["process_id"], run=data["run"], slug=data["slug"]
         ).exists():
             raise ValidationError(
                 f"A task with the `{data['slug']}` slug already exists in run {data['run']}."
@@ -608,8 +571,9 @@ class NewTaskSerializer(serializers.ModelSerializer):
 
         # Build task environment from PONOS_DEFAULT_ENV if the env is not (fully) defined in data
         data["env"] = {**settings.PONOS_DEFAULT_ENV, **data["env"]}
-        # Get process and corpus id from any of the parent tasks since it's the same for all of them
-        data["env"]["ARKINDEX_PROCESS_ID"] = parents[0].env['ARKINDEX_PROCESS_ID']
+        data["env"]["ARKINDEX_PROCESS_ID"] = str(data["process_id"])
+        # Get the corpus ID from any of the parent tasks since it is the same for all of them
+        # TODO: Get the corpus ID from the process using a ForeignKeyField on the serializer and data["process"]
         data["env"]["ARKINDEX_CORPUS_ID"] = parents[0].env.get('ARKINDEX_CORPUS_ID', None)
 
         # Set the task token manually so that we can immediately copy it to the environment variables,
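
For reference, the validation above accepts a payload of the shape sketched below and derives the child task's environment from the process and its parent tasks. The IDs and the PONOS_DEFAULT_ENV value are invented for the example; this is a sketch, not code from this patch.

    # Hypothetical payload for NewTaskSerializer: parents must share the same
    # process and run, and the slug must be unique within that run.
    payload = {
        "process_id": "11111111-2222-3333-4444-555555555555",
        "slug": "test_task",
        "image": "registry.gitlab.com/test",
        "parents": [str(parent1.id), str(parent2.id)],
    }

    # Assuming PONOS_DEFAULT_ENV = {"SOME_VAR": "1"} and no "env" key in the
    # payload, the resulting task environment would be:
    # {
    #     "SOME_VAR": "1",
    #     "ARKINDEX_PROCESS_ID": "11111111-2222-3333-4444-555555555555",
    #     "ARKINDEX_CORPUS_ID": parent1.env.get("ARKINDEX_CORPUS_ID"),
    # }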
diff --git a/arkindex/ponos/tests/test_api.py b/arkindex/ponos/tests/test_api.py
index 2b397cb93f..2de4b35dd2 100644
--- a/arkindex/ponos/tests/test_api.py
+++ b/arkindex/ponos/tests/test_api.py
@@ -20,7 +20,7 @@ from rest_framework import status
 from arkindex.documents.models import Corpus
 from arkindex.ponos.api import timezone as api_tz
 from arkindex.ponos.authentication import AgentUser
-from arkindex.ponos.models import ACTIVE_STATES, FINAL_STATES, GPU, Agent, Farm, Secret, State, Task, Workflow, encrypt
+from arkindex.ponos.models import ACTIVE_STATES, FINAL_STATES, GPU, Agent, Farm, Secret, State, Task, encrypt
 from arkindex.process.models import Process, ProcessMode, Revision, WorkerVersion
 from arkindex.project.tests import FixtureAPITestCase
 from arkindex.project.tools import build_public_key
@@ -40,12 +40,12 @@ class TestAPI(FixtureAPITestCase):
     @classmethod
     def setUpTestData(cls):
         super().setUpTestData()
-        cls.maxDiff = None
-        cls.farm = Farm.objects.get()
+        cls.default_farm = Farm.objects.get(name='Default farm')
+        cls.wheat_farm = Farm.objects.get(name='Wheat farm')
         pubkey = build_public_key()
         cls.agent = AgentUser.objects.create(
             id=uuid.UUID(hashlib.md5(pubkey.encode("utf-8")).hexdigest()),
-            farm=cls.farm,
+            farm=cls.wheat_farm,
             hostname="ghostname",
             cpu_cores=2,
             cpu_frequency=1e9,
@@ -56,8 +56,7 @@ class TestAPI(FixtureAPITestCase):
         cls.rev = Revision.objects.first()
         cls.process = Process.objects.get(mode=ProcessMode.Workers)
         cls.process.start()
-        cls.workflow = cls.process.workflow
-        cls.task1, cls.task2, cls.task3 = cls.workflow.tasks.all()
+        cls.task1, cls.task2, cls.task3 = cls.process.tasks.all()
         cls.dla = WorkerVersion.objects.get(worker__slug='dla')
         cls.recognizer = WorkerVersion.objects.get(worker__slug='reco')
         cls.gpu1 = cls.agent.gpus.create(
@@ -73,237 +72,6 @@ class TestAPI(FixtureAPITestCase):
             ram_total=8 * 1024 * 1024 * 1024,
         )
 
-    def _build_workflow_response(self, response, **kwargs):
-        """
-        Return the serialization of the test workflow.
-        Some parameters may be updated with kwargs.
-        """
-        self.task1.refresh_from_db()
-        self.task2.refresh_from_db()
-        self.task3.refresh_from_db()
-        self.workflow.refresh_from_db()
-        data = {
-            "id": str(self.workflow.id),
-            "created": self.workflow.created.strftime("%G-%m-%dT%H:%M:%S.%fZ"),
-            "finished": self.workflow.finished.strftime("%G-%m-%dT%H:%M:%S.%fZ")
-            if self.workflow.finished
-            else None,
-            "state": self.workflow.state.value,
-            "farm": {
-                "id": str(self.workflow.farm.id),
-                "name": self.workflow.farm.name,
-            },
-            "tasks": [
-                {
-                    "id": str(self.task1.id),
-                    "run": 0,
-                    "depth": 0,
-                    "slug": "initialisation",
-                    "state": self.task1.state.value,
-                    "parents": [],
-                    "tags": [],
-                    "shm_size": self.task1.shm_size,
-                    "url": response.wsgi_request.build_absolute_uri(
-                        reverse("api:task-details", args=[self.task1.id])
-                    ),
-                },
-                {
-                    "id": str(self.task2.id),
-                    "run": 0,
-                    "depth": 1,
-                    "slug": f"dla_{str(self.dla.id)[0:6]}",
-                    "state": self.task2.state.value,
-                    "parents": [str(self.task1.id)],
-                    "tags": [],
-                    "shm_size": self.task2.shm_size,
-                    "url": response.wsgi_request.build_absolute_uri(
-                        reverse("api:task-details", args=[self.task2.id])
-                    ),
-                },
-                {
-                    "id": str(self.task3.id),
-                    "run": 0,
-                    "depth": 2,
-                    "slug": f"reco_{str(self.recognizer.id)[0:6]}",
-                    "state": self.task3.state.value,
-                    "parents": [str(self.task2.id)],
-                    "tags": [],
-                    "shm_size": self.task3.shm_size,
-                    "url": response.wsgi_request.build_absolute_uri(
-                        reverse("api:task-details", args=[self.task3.id])
-                    ),
-                },
-            ],
-        }
-        data.update(kwargs)
-        return data
-
-    def test_workflow_details(self):
-        self.client.force_login(self.superuser)
-        with self.assertNumQueries(6):
-            resp = self.client.get(
-                reverse("api:workflow-details", args=[self.workflow.id]),
-            )
-            self.assertEqual(resp.status_code, status.HTTP_200_OK)
-        data = resp.json()
-
-        self.assertDictEqual(
-            data, self._build_workflow_response(resp, state="unscheduled")
-        )
-
-    def test_partial_update_workflow_stopping(self):
-        self.task1.state = State.Pending
-        self.task1.save()
-        self.client.force_login(self.superuser)
-        resp = self.client.patch(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Stopping.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_200_OK)
-        data = resp.json()
-
-        self.assertDictEqual(
-            data, self._build_workflow_response(resp, state="stopping")
-        )
-        self.assertEqual(self.task1.state.value, "stopping")
-        self.assertEqual(self.task2.state.value, "stopped")
-
-    def test_partial_update_workflow_stopping_finished(self):
-        self.task1.state = State.Completed
-        self.task1.save()
-        self.task2.state = State.Completed
-        self.task2.save()
-        self.task3.state = State.Completed
-        self.task3.save()
-        self.client.force_login(self.superuser)
-        resp = self.client.patch(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Stopping.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
-        data = resp.json()
-        self.assertEqual(data, {"state": ["Cannot stop a completed workflow"]})
-
-    def test_partial_update_workflow_forbidden_fields(self):
-        """
-        Only workflow state can be updated
-        """
-        new_id = uuid.uuid4()
-        new_farm_id = uuid.uuid4()
-        self.client.force_login(self.superuser)
-        resp = self.client.patch(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {
-                "id": new_id,
-                "recipe": {},
-                "updated": "2000-01-00T00:00:00.000000Z",
-                "finished": "2000-01-00T00:00:00.000000Z",
-                "tasks": [],
-                "farm": new_farm_id,
-            },
-            format="json",
-        )
-        self.assertEqual(resp.status_code, status.HTTP_200_OK)
-        self.assertDictEqual(resp.json(), self._build_workflow_response(resp))
-        self.workflow.refresh_from_db()
-        self.assertEqual(self.workflow.tasks.count(), 3)
-        self.assertNotEqual(self.workflow.id, new_id)
-        self.assertNotEqual(self.workflow.farm_id, new_farm_id)
-        self.assertNotEqual(self.workflow.updated.isoformat(), '2000-01-01T00:00:00+00:00')
-        self.assertNotEqual(self.workflow.finished.isoformat(), '2000-01-01T00:00:00+00:00')
-
-    def test_partial_update_workflow_only_stopping(self):
-        self.client.force_login(self.superuser)
-        resp = self.client.patch(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Completed.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
-        data = resp.json()
-        self.assertEqual(data, {"state": ["Can only change the state to 'stopping'"]})
-
-    def test_update_workflow_stopping(self):
-        self.task1.state = State.Pending
-        self.task1.save()
-        self.client.force_login(self.superuser)
-        resp = self.client.put(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Stopping.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_200_OK)
-        data = resp.json()
-
-        self.assertDictEqual(
-            data, self._build_workflow_response(resp, state="stopping")
-        )
-        self.assertEqual(self.task1.state.value, "stopping")
-        self.assertEqual(self.task2.state.value, "stopped")
-
-    def test_update_workflow_stopping_finished(self):
-        self.task1.state = State.Completed
-        self.task1.save()
-        self.task2.state = State.Completed
-        self.task2.save()
-        self.task3.state = State.Completed
-        self.task3.save()
-        self.client.force_login(self.superuser)
-        resp = self.client.put(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Stopping.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
-        data = resp.json()
-        self.assertEqual(data, {"state": ["Cannot stop a completed workflow"]})
-
-    def test_update_workflow_forbidden_fields(self):
-        """
-        Only workflow state can be updated
-        """
-        self.task1.state = State.Pending
-        self.task1.save()
-
-        new_id = uuid.uuid4()
-        new_farm_id = uuid.uuid4()
-        self.client.force_login(self.superuser)
-        resp = self.client.put(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {
-                "id": new_id,
-                "recipe": {},
-                "state": "stopping",
-                "updated": "2000-01-00T00:00:00.000000Z",
-                "finished": "2000-01-00T00:00:00.000000Z",
-                "tasks": [],
-                "farm": new_farm_id,
-            },
-            format="json",
-        )
-        self.assertEqual(resp.status_code, status.HTTP_200_OK)
-        self.assertDictEqual(resp.json(), self._build_workflow_response(resp))
-
-        self.workflow.refresh_from_db()
-        self.assertEqual(self.workflow.tasks.count(), 3)
-        self.assertNotEqual(self.workflow.id, new_id)
-        self.assertNotEqual(self.workflow.farm_id, new_farm_id)
-        self.assertNotEqual(self.workflow.updated.isoformat(), '2000-01-01T00:00:00+00:00')
-        self.assertIsNone(self.workflow.finished)
-
-        # Only the state was updated to Stopping
-        self.task1.refresh_from_db()
-        self.assertEqual(self.task1.state, State.Stopping)
-        self.task2.refresh_from_db()
-        self.assertEqual(self.task2.state, State.Stopped)
-
-    def test_update_workflow_only_stopping(self):
-        self.client.force_login(self.superuser)
-        resp = self.client.put(
-            reverse("api:workflow-details", kwargs={"pk": self.workflow.id}),
-            {"state": State.Completed.value},
-        )
-        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
-        data = resp.json()
-        self.assertEqual(data, {"state": ["Can only change the state to 'stopping'"]})
-
     @patch("arkindex.ponos.aws.s3")
     @patch("arkindex.ponos.models.s3")
     def test_task_details(self, s3_mock, aws_s3_mock):
@@ -376,7 +144,7 @@ class TestAPI(FixtureAPITestCase):
             'image_artifact_url': None,
             'agent_id': None,
             'gpu_id': None,
-            'workflow_id': str(self.workflow.id),
+            'process_id': str(self.process.id),
             'extra_files': {},
             's3_logs_put_url': None,
         })
@@ -426,7 +194,7 @@ class TestAPI(FixtureAPITestCase):
             'image_artifact_url': f'http://testserver/api/v1/task/{str(self.task1.id)}/artifact/path/to/image.tar',
             'agent_id': None,
             'gpu_id': None,
-            'workflow_id': str(self.workflow.id),
+            'process_id': str(self.process.id),
             'extra_files': {},
             's3_logs_put_url': None,
         })
@@ -461,7 +229,7 @@ class TestAPI(FixtureAPITestCase):
             'image_artifact_url': None,
             'agent_id': None,
             'gpu_id': None,
-            'workflow_id': str(self.workflow.id),
+            'process_id': str(self.process.id),
             'extra_files': {},
             's3_logs_put_url': None,
         })
@@ -498,8 +266,8 @@ class TestAPI(FixtureAPITestCase):
 
         for state in FINAL_STATES:
             with self.subTest(state=state):
-                self.workflow.finished = None
-                self.workflow.save()
+                self.process.finished = None
+                self.process.save()
 
                 resp = self.client.patch(
                     reverse("api:task-details", args=[self.task3.id]),
@@ -511,8 +279,8 @@ class TestAPI(FixtureAPITestCase):
                 self.task3.refresh_from_db()
                 self.assertEqual(self.task3.state, state)
 
-                self.workflow.refresh_from_db()
-                self.assertEqual(self.workflow.finished, expected_datetime)
+                self.process.refresh_from_db()
+                self.assertEqual(self.process.finished, expected_datetime)
 
     @patch("arkindex.ponos.signals.task_failure.send_robust")
     @patch("arkindex.ponos.aws.s3")
@@ -706,7 +474,7 @@ class TestAPI(FixtureAPITestCase):
         self.task1.agent = self.agent
         self.task1.save()
 
-        with self.assertNumQueries(8):
+        with self.assertNumQueries(7):
             resp = self.client.put(
                 reverse("api:task-details", args=[self.task1.id]),
                 data={"state": State.Running.value},
@@ -720,6 +488,53 @@ class TestAPI(FixtureAPITestCase):
         self.task1.refresh_from_db()
         self.assertEqual(self.task1.state, State.Running)
 
+    @patch('arkindex.ponos.models.Task.short_logs', new_callable=PropertyMock)
+    def test_update_task_from_agent_completed_pends_children(self, short_logs_mock):
+        """
+        Child tasks whose parents are all completed get set to pending as soon as the agent updates the last parent
+        """
+        short_logs_mock.return_value = ""
+
+        self.task1.agent = self.agent
+        self.task2.agent = self.agent
+        self.task1.save()
+        self.task2.save()
+        self.task1.parents.clear()
+        self.task2.parents.clear()
+        self.task3.parents.set([self.task1, self.task2])
+
+        with self.assertNumQueries(11):
+            resp = self.client.put(
+                reverse("api:task-details", args=[self.task1.id]),
+                data={"state": State.Completed.value},
+                HTTP_AUTHORIZATION=f"Bearer {self.agent.token.access_token}",
+            )
+            self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        self.task1.refresh_from_db()
+        self.task2.refresh_from_db()
+        self.task3.refresh_from_db()
+        self.assertEqual(self.task1.state, State.Completed)
+        self.assertEqual(self.task2.state, State.Unscheduled)
+        # Only one of the two parents is completed: nothing happens.
+        self.assertEqual(self.task3.state, State.Unscheduled)
+
+        with self.assertNumQueries(12):
+            resp = self.client.put(
+                reverse("api:task-details", args=[self.task2.id]),
+                data={"state": State.Completed.value},
+                HTTP_AUTHORIZATION=f"Bearer {self.agent.token.access_token}",
+            )
+            self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        self.task1.refresh_from_db()
+        self.task2.refresh_from_db()
+        self.task3.refresh_from_db()
+        self.assertEqual(self.task1.state, State.Completed)
+        self.assertEqual(self.task2.state, State.Completed)
+        # Both parents are completed, the child task is now pending
+        self.assertEqual(self.task3.state, State.Pending)
+
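
The agent-side call exercised by this test is roughly the sketch below; the host name, task ID and token are placeholders, and the endpoint path is assumed from the api:task-details route used above.

    import requests

    # Illustrative only: an agent reporting its task as completed.
    resp = requests.put(
        f"https://arkindex.example.com/api/v1/task/{task_id}/",
        json={"state": "completed"},
        headers={"Authorization": f"Bearer {agent_token}"},
    )
    resp.raise_for_status()
    # Once every parent of a child task is completed, the server flips the
    # child to "pending" so it becomes available to agents.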
     def test_update_running_task_state_stopping(self):
         self.task1.state = State.Running
         self.task1.save()
@@ -766,7 +581,7 @@ class TestAPI(FixtureAPITestCase):
         self.task1.save()
         self.client.force_login(self.superuser)
 
-        with self.assertNumQueries(4):
+        with self.assertNumQueries(6):
             resp = self.client.put(
                 reverse("api:task-update", args=[self.task1.id]),
                 data={"state": State.Pending.value},
@@ -807,7 +622,7 @@ class TestAPI(FixtureAPITestCase):
         self.task1.agent = self.agent
         self.task1.save()
 
-        with self.assertNumQueries(8):
+        with self.assertNumQueries(7):
             resp = self.client.patch(
                 reverse("api:task-details", args=[self.task1.id]),
                 data={"state": State.Running.value},
@@ -821,6 +636,53 @@ class TestAPI(FixtureAPITestCase):
         self.task1.refresh_from_db()
         self.assertEqual(self.task1.state, State.Running)
 
+    @patch('arkindex.ponos.models.Task.short_logs', new_callable=PropertyMock)
+    def test_partial_update_task_from_agent_completed_pends_children(self, short_logs_mock):
+        """
+        Child tasks whose parents are all completed get set to pending as soon as the agent updates the last parent
+        """
+        short_logs_mock.return_value = ""
+
+        self.task1.agent = self.agent
+        self.task2.agent = self.agent
+        self.task1.save()
+        self.task2.save()
+        self.task1.parents.clear()
+        self.task2.parents.clear()
+        self.task3.parents.set([self.task1, self.task2])
+
+        with self.assertNumQueries(11):
+            resp = self.client.patch(
+                reverse("api:task-details", args=[self.task1.id]),
+                data={"state": State.Completed.value},
+                HTTP_AUTHORIZATION=f"Bearer {self.agent.token.access_token}",
+            )
+            self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        self.task1.refresh_from_db()
+        self.task2.refresh_from_db()
+        self.task3.refresh_from_db()
+        self.assertEqual(self.task1.state, State.Completed)
+        self.assertEqual(self.task2.state, State.Unscheduled)
+        # Only one of the two parents is completed: nothing happens.
+        self.assertEqual(self.task3.state, State.Unscheduled)
+
+        with self.assertNumQueries(12):
+            resp = self.client.patch(
+                reverse("api:task-details", args=[self.task2.id]),
+                data={"state": State.Completed.value},
+                HTTP_AUTHORIZATION=f"Bearer {self.agent.token.access_token}",
+            )
+            self.assertEqual(resp.status_code, status.HTTP_200_OK)
+
+        self.task1.refresh_from_db()
+        self.task2.refresh_from_db()
+        self.task3.refresh_from_db()
+        self.assertEqual(self.task1.state, State.Completed)
+        self.assertEqual(self.task2.state, State.Completed)
+        # Both parents are completed, the child task is now pending
+        self.assertEqual(self.task3.state, State.Pending)
+
     def test_partial_update_running_task_state_stopping(self):
         self.task1.state = State.Running
         self.task1.save()
@@ -868,7 +730,7 @@ class TestAPI(FixtureAPITestCase):
         self.task1.save()
         self.client.force_login(self.superuser)
 
-        with self.assertNumQueries(4):
+        with self.assertNumQueries(6):
             resp = self.client.patch(
                 reverse("api:task-update", args=[self.task1.id]),
                 data={"state": State.Pending.value},
@@ -941,7 +803,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 42,
                 "cpu_frequency": 1337e6,
                 "ram_total": 16e9,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "public_key": build_public_key(),
                 "derivation": "{:032x}".format(random.getrandbits(128)),
                 "gpus": [
@@ -989,7 +851,7 @@ class TestAPI(FixtureAPITestCase):
                 "hostname": "toastname",
                 "cpu_cores": 42,
                 "cpu_frequency": int(1337e6),
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "ram_total": 16_000_000_000,
                 "cpu_load": None,
                 "ram_load": None,
@@ -1025,7 +887,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 12,
                 "cpu_frequency": 1e9,
                 "ram_total": 32e9,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "public_key": self.agent.public_key,
                 "derivation": "{:032x}".format(random.getrandbits(128)),
                 "gpus": [
@@ -1064,7 +926,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 12,
                 "cpu_frequency": 1000000000,
                 "cpu_load": None,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "gpus": [
                     {
                         "id": "deadbeef-c6bd-4de6-ae92-866a270be36f",
@@ -1094,7 +956,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 42,
                 "cpu_frequency": 1337e6,
                 "ram_total": 16e9,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "public_key": build_public_key(),
                 "derivation": "{:032x}".format(random.getrandbits(128)),
                 "gpus": [
@@ -1147,7 +1009,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 12,
                 "cpu_frequency": 1e9,
                 "ram_total": 32e9,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "public_key": self.agent.public_key,
                 "derivation": "{:032x}".format(random.getrandbits(128)),
                 "gpus": [
@@ -1191,7 +1053,7 @@ class TestAPI(FixtureAPITestCase):
         )
 
         # Add 1 to the farm's seed
-        wrong_seed = "{:064x}".format(int(self.farm.seed, 16) + 1)
+        wrong_seed = "{:064x}".format(int(self.wheat_farm.seed, 16) + 1)
 
         # Perform derivation with the wrong seed
         shared_key = agent_private_key.exchange(ec.ECDH(), server_public_key)
@@ -1210,7 +1072,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_cores": 42,
                 "cpu_frequency": 1337e6,
                 "ram_total": 16e9,
-                "farm": str(self.farm.id),
+                "farm": str(self.wheat_farm.id),
                 "public_key": agent_public_bytes.decode("utf-8"),
                 "derivation": base64.b64encode(derived_key).decode("utf-8"),
                 "gpus": [],
@@ -1267,7 +1129,7 @@ class TestAPI(FixtureAPITestCase):
         """
         Only pending tasks may be retrieved as new actions
         """
-        self.workflow.tasks.update(state=State.Error)
+        self.process.tasks.update(state=State.Error)
         with self.assertNumQueries(7):
             resp = self.client.get(
                 reverse("api:agent-actions"),
@@ -1281,11 +1143,11 @@ class TestAPI(FixtureAPITestCase):
         """
         An agent may not take another agent's tasks
         """
-        self.workflow.tasks.update(agent=self.agent, state=State.Pending)
+        self.process.tasks.update(agent=self.agent, state=State.Pending)
         pubkey = build_public_key()
         agent2 = AgentUser.objects.create(
             id=uuid.UUID(hashlib.md5(pubkey.encode("utf-8")).hexdigest()),
-            farm=self.farm,
+            farm=self.wheat_farm,
             hostname="agentorange",
             cpu_cores=2,
             cpu_frequency=1e9,
@@ -1306,7 +1168,9 @@ class TestAPI(FixtureAPITestCase):
         """
         Agent may retrieve one task using the API due to its resources limitations
         """
-        self.workflow.tasks.update(state=State.Pending)
+        self.process.farm = self.agent.farm
+        self.process.save()
+        self.process.tasks.update(state=State.Pending)
         now = timezone.now()
 
         with patch.object(api_tz, "now") as api_now_mock:
@@ -1447,7 +1311,7 @@ class TestAPI(FixtureAPITestCase):
         del data["s3_put_url"]
         self.assertTrue(
             s3_put_url.startswith(
-                f"http://somewhere/ponos-artifacts/{self.workflow.id}/{self.task1.id}/some/path.txt"
+                f"http://somewhere/ponos-artifacts/{self.task1.id}/some/path.txt"
             )
         )
 
@@ -1542,7 +1406,7 @@ class TestAPI(FixtureAPITestCase):
         s3_put_url = data.pop("s3_put_url")
         self.assertTrue(
             s3_put_url.startswith(
-                f"http://somewhere/ponos-artifacts/{self.workflow.id}/{self.task1.id}/some/path.txt"
+                f"http://somewhere/ponos-artifacts/{self.task1.id}/some/path.txt"
             )
         )
 
@@ -1597,7 +1461,7 @@ class TestAPI(FixtureAPITestCase):
         self.assertTrue(resp.has_header("Location"))
         self.assertTrue(
             resp["Location"].startswith(
-                f"http://somewhere/ponos-artifacts/{self.workflow.id}/{self.task1.id}/path/to/file.json"
+                f"http://somewhere/ponos-artifacts/{self.task1.id}/path/to/file.json"
             )
         )
 
@@ -1610,7 +1474,7 @@ class TestAPI(FixtureAPITestCase):
                 "image": ["This field is required."],
                 "parents": ["This field is required."],
                 "slug": ["This field is required."],
-                "workflow_id": ["This field is required."],
+                "process_id": ["This field is required."],
             },
         )
 
@@ -1618,7 +1482,7 @@ class TestAPI(FixtureAPITestCase):
         response = self.client.post(
             reverse("api:task-create"),
             data={
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "test_task",
                 "image": "registry.gitlab.com/test",
                 "parents": [],
@@ -1631,8 +1495,12 @@ class TestAPI(FixtureAPITestCase):
         )
 
-    def test_task_create_distinct_workflows_on_parents(self):
+    def test_task_create_distinct_processes_on_parents(self):
-        workflow2 = Workflow.objects.create(farm=self.farm)
-        task3 = workflow2.tasks.create(
+        process2 = Process.objects.create(
+            farm=self.wheat_farm,
+            mode=ProcessMode.Repository,
+            creator=self.superuser,
+        )
+        task3 = process2.tasks.create(
             run=0,
             depth=1,
             slug="task_parent",
@@ -1642,7 +1510,7 @@ class TestAPI(FixtureAPITestCase):
         response = self.client.post(
             reverse("api:task-create"),
             data={
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "test_task",
                 "image": "registry.gitlab.com/test",
                 "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
@@ -1654,13 +1522,13 @@ class TestAPI(FixtureAPITestCase):
             response.json(),
             {
                 "non_field_errors": [
-                    "All parents must be in the same workflow as the child task"
+                    "All parents must be in the same process as the child task"
                 ]
             },
         )
 
     def test_task_create_distinct_runs_on_parents(self):
-        task3 = self.workflow.tasks.create(
+        task3 = self.process.tasks.create(
             run=1,
             depth=1,
             slug="task_parent",
@@ -1670,7 +1538,7 @@ class TestAPI(FixtureAPITestCase):
         response = self.client.post(
             reverse("api:task-create"),
             data={
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "test_task",
                 "image": "registry.gitlab.com/test",
                 "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
@@ -1682,13 +1550,13 @@ class TestAPI(FixtureAPITestCase):
             response.json(),
             {
                 "non_field_errors": [
-                    "All parents must have the same run in the given workflow"
+                    "All parents must have the same run in the given process"
                 ]
             },
         )
 
     def test_task_create_duplicate(self):
-        self.workflow.tasks.create(
+        self.process.tasks.create(
             run=0,
             depth=3,
             slug="sibling",
@@ -1698,7 +1566,7 @@ class TestAPI(FixtureAPITestCase):
         response = self.client.post(
             reverse("api:task-create"),
             data={
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "sibling",
                 "image": "registry.gitlab.com/test",
                 "parents": [str(self.task1.id), str(self.task2.id)],
@@ -1716,18 +1584,18 @@ class TestAPI(FixtureAPITestCase):
         )
 
     def test_task_create(self):
-        task3 = self.workflow.tasks.create(
+        task3 = self.process.tasks.create(
             run=0,
             depth=3,
             slug="task_parent",
             image="registry.gitlab.com/test",
         )
 
-        with self.assertNumQueries(11):
+        with self.assertNumQueries(8):
             response = self.client.post(
                 reverse("api:task-create"),
                 data={
-                    "workflow_id": str(self.workflow.id),
+                    "process_id": str(self.process.id),
                     "slug": "test_task",
                     "image": "registry.gitlab.com/test",
                     "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
@@ -1738,13 +1606,13 @@ class TestAPI(FixtureAPITestCase):
             )
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
-        new_task = self.workflow.tasks.get(slug='test_task')
+        new_task = self.process.tasks.get(slug='test_task')
 
         self.assertDictEqual(
             response.json(),
             {
                 "id": str(new_task.id),
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "test_task",
                 "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
                 "image": "registry.gitlab.com/test",
@@ -1766,18 +1634,18 @@ class TestAPI(FixtureAPITestCase):
         )
 
     def test_task_create_has_docker_socket_true(self):
-        task3 = self.workflow.tasks.create(
+        task3 = self.process.tasks.create(
             run=0,
             depth=3,
             slug="task_parent",
             image="registry.gitlab.com/test",
         )
 
-        with self.assertNumQueries(11):
+        with self.assertNumQueries(8):
             response = self.client.post(
                 reverse("api:task-create"),
                 data={
-                    "workflow_id": str(self.workflow.id),
+                    "process_id": str(self.process.id),
                     "slug": "test_task",
                     "image": "registry.gitlab.com/test",
                     "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
@@ -1789,13 +1657,13 @@ class TestAPI(FixtureAPITestCase):
             )
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
-        new_task = self.workflow.tasks.get(slug='test_task')
+        new_task = self.process.tasks.get(slug='test_task')
 
         self.assertDictEqual(
             response.json(),
             {
                 "id": str(new_task.id),
-                "workflow_id": str(self.workflow.id),
+                "process_id": str(self.process.id),
                 "slug": "test_task",
                 "parents": [str(self.task1.id), str(self.task2.id), str(task3.id)],
                 "image": "registry.gitlab.com/test",
@@ -1888,33 +1756,6 @@ class TestAPI(FixtureAPITestCase):
                     response = self.client.get(reverse("api:secret-details", kwargs={"name": account_name}))
                     self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
 
-    @override_settings(PONOS_PRIVATE_KEY=PONOS_PRIVATE_KEY)
-    def test_task_retrieve_secret_requires_process(self):
-        account_name = "bank_account/0001/private"
-        secret = Secret.objects.create(
-            name=account_name,
-            nonce=b"1337" * 4,
-            content=encrypt(b"1337" * 4, "1337$"),
-        )
-        self.assertEqual(secret.content, b"\xc1\x81\xc0\xceo")
-
-        workflow = Workflow.objects.create(farm_id=self.farm.id)
-        self.task1.workflow = workflow
-        self.task1.save()
-        self.task1.refresh_from_db()
-
-        with self.assertNumQueries(1):
-            response = self.client.get(
-                reverse("api:secret-details", kwargs={"name": account_name}),
-                HTTP_AUTHORIZATION=f'Ponos {self.task1.token}',
-            )
-            self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
-
-        self.assertDictEqual(
-            response.json(),
-            {'detail': 'Task has no process.'},
-        )
-
     @override_settings(PONOS_PRIVATE_KEY=PONOS_PRIVATE_KEY)
     def test_task_retrieve_secret_requires_active_user(self):
         account_name = "bank_account/0001/private"
@@ -1986,7 +1827,7 @@ class TestAPI(FixtureAPITestCase):
                 Task(
                     run=0,
                     depth=0,
-                    workflow=self.workflow,
+                    process=self.process,
                     slug=state.value,
                     state=state,
                     agent=self.agent,
@@ -2012,7 +1853,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_frequency": 1000000000,
                 "cpu_load": None,
                 "farm": {
-                    "id": str(self.farm.id),
+                    "id": str(self.wheat_farm.id),
                     "name": "Wheat farm",
                 },
                 "gpus": [
@@ -2046,7 +1887,7 @@ class TestAPI(FixtureAPITestCase):
                 Task(
                     run=0,
                     depth=0,
-                    workflow=self.workflow,
+                    process=self.process,
                     slug=state.value,
                     state=state,
                     agent=self.agent,
@@ -2073,7 +1914,7 @@ class TestAPI(FixtureAPITestCase):
                 "cpu_frequency": 1000000000,
                 "cpu_load": None,
                 "farm": {
-                    "id": str(self.farm.id),
+                    "id": str(self.wheat_farm.id),
                     "name": "Wheat farm",
                 },
                 "gpus": [
@@ -2115,7 +1956,6 @@ class TestAPI(FixtureAPITestCase):
         """
         Any user is able to list farms basic information
         """
-        barley_farm = Farm.objects.create(name="Barley")
         with self.assertNumQueries(2):
             response = self.client.get(reverse("api:farm-list"))
             self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -2127,8 +1967,8 @@ class TestAPI(FixtureAPITestCase):
                 "previous": None,
                 "next": None,
                 "results": [
-                    {"id": str(barley_farm.id), "name": "Barley"},
-                    {"id": str(self.farm.id), "name": "Wheat farm"},
+                    {"id": str(self.default_farm.id), "name": "Default farm"},
+                    {"id": str(self.wheat_farm.id), "name": "Wheat farm"},
                 ],
             },
         )
diff --git a/arkindex/ponos/tests/test_models.py b/arkindex/ponos/tests/test_models.py
index 13fed4d6d6..45457f7e45 100644
--- a/arkindex/ponos/tests/test_models.py
+++ b/arkindex/ponos/tests/test_models.py
@@ -6,7 +6,7 @@ from django.db.models import prefetch_related_objects
 from django.test import override_settings
 from django.utils import timezone
 
-from arkindex.ponos.models import FINAL_STATES, Agent, Farm, Secret, State, Workflow, build_aes_cipher, encrypt
+from arkindex.ponos.models import FINAL_STATES, Agent, Farm, Secret, State, build_aes_cipher, encrypt
 from arkindex.process.models import ProcessMode
 from arkindex.project.tests import FixtureAPITestCase
 
@@ -18,8 +18,7 @@ class TestModels(FixtureAPITestCase):
         cls.farm = Farm.objects.create(name="Cryptominers")
         cls.process = cls.corpus.processes.create(creator=cls.user, mode=ProcessMode.Workers)
         cls.process.start()
-        cls.workflow = cls.process.workflow
-        cls.task1 = cls.workflow.tasks.first()
+        cls.task1 = cls.process.tasks.first()
         cls.nonce = b"42" + b"0" * 14
 
     def setUp(self):
@@ -54,14 +53,14 @@ class TestModels(FixtureAPITestCase):
                     self.task1.is_final(), msg="{} should be final".format(state)
                 )
                 self.assertTrue(
-                    self.workflow.is_final(), msg="{} should be final".format(state)
+                    self.process.is_final, msg="{} should be final".format(state)
                 )
             else:
                 self.assertFalse(
                     self.task1.is_final(), msg="{} should not be final".format(state)
                 )
                 self.assertFalse(
-                    self.workflow.is_final(), msg="{} should not be final".format(state)
+                    self.process.is_final, msg="{} should not be final".format(state)
                 )
 
     def test_delete_agent_non_final(self):
@@ -138,7 +137,7 @@ class TestModels(FixtureAPITestCase):
         self.assertFalse(self.task1.requires_gpu)
 
     def test_activate_requires_gpu(self):
-        test_task = self.process.build_task('do something --like this', 'test_task', self.workflow, {}, requires_gpu=True)
+        test_task = self.process.build_task('do something --like this', 'test_task', {}, requires_gpu=True)
         self.assertEqual(test_task.requires_gpu, True)
 
     @patch("arkindex.ponos.models.timezone")
@@ -146,21 +145,18 @@ class TestModels(FixtureAPITestCase):
         timezone_mock.now.return_value = timezone.datetime(3000, 1, 12).astimezone()
         # Expecting a default expiry 30 days after timezone.now
         expected_expiry = timezone.datetime(3000, 2, 11).astimezone()
-        workflow = Workflow.objects.create(
-            farm=self.farm
-        )
 
-        # A workflow with no tasks has no expiry
-        self.assertFalse(workflow.tasks.exists())
-        self.assertIsNone(workflow.expiry)
+        # A process with no tasks has no expiry
+        self.process.tasks.all().delete()
+        self.assertIsNone(self.process.expiry)
 
-        task1 = workflow.tasks.create(
+        task1 = self.process.tasks.create(
             command='do something --like this',
             slug='test_task_1',
             run=1,
             depth=0
         )
-        task2 = workflow.tasks.create(
+        task2 = self.process.tasks.create(
             command='do something else --like that',
             slug='test_task_2',
             run=1,
@@ -169,7 +165,7 @@ class TestModels(FixtureAPITestCase):
 
         self.assertEqual(task1.expiry, expected_expiry)
         self.assertEqual(task2.expiry, expected_expiry)
-        self.assertEqual(workflow.expiry, expected_expiry)
+        self.assertEqual(self.process.expiry, expected_expiry)
 
         # Override a task's expiry
         custom_expiry = timezone.datetime(3000, 4, 20).astimezone()
@@ -177,56 +173,56 @@ class TestModels(FixtureAPITestCase):
         task2.save()
 
-        # The workflow's expiry should be the latest expiry
+        # The process's expiry should be the latest expiry
-        self.assertEqual(workflow.expiry, custom_expiry)
+        self.assertEqual(self.process.expiry, custom_expiry)
 
-    def test_workflow_expiry_query_count(self):
+    def test_process_expiry_query_count(self):
         """
-        Workflow.expiry causes an SQL query only when tasks are not prefetched
+        Process.expiry causes an SQL query only when tasks are not prefetched
         """
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.expiry, self.task1.expiry)
+            self.assertEqual(self.process.expiry, self.task1.expiry)
 
         # Request the expiry again: it is not cached, there still is an SQL query
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.expiry, self.task1.expiry)
+            self.assertEqual(self.process.expiry, self.task1.expiry)
 
-        prefetch_related_objects([self.workflow], "tasks")
+        prefetch_related_objects([self.process], "tasks")
         with self.assertNumQueries(0):
-            self.assertEqual(self.workflow.expiry, self.task1.expiry)
+            self.assertEqual(self.process.expiry, self.task1.expiry)
 
-    def test_workflow_get_state(self):
+    def test_process_get_state(self):
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.state, State.Unscheduled)
+            self.assertEqual(self.process.state, State.Unscheduled)
 
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.get_state(0), State.Unscheduled)
+            self.assertEqual(self.process.get_state(0), State.Unscheduled)
 
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.get_state(1), State.Unscheduled)
+            self.assertEqual(self.process.get_state(1), State.Unscheduled)
 
         # Negative run numbers should not result in any SQL query, since we know they are always empty
         with self.assertNumQueries(0):
-            self.assertEqual(self.workflow.get_state(-1), State.Unscheduled)
+            self.assertEqual(self.process.get_state(-1), State.Unscheduled)
 
         self.task1.state = State.Running
         self.task1.save()
 
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.state, State.Running)
+            self.assertEqual(self.process.state, State.Running)
 
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.get_state(0), State.Running)
+            self.assertEqual(self.process.get_state(0), State.Running)
 
         with self.assertNumQueries(1):
-            self.assertEqual(self.workflow.get_state(1), State.Unscheduled)
+            self.assertEqual(self.process.get_state(1), State.Unscheduled)
 
         with self.assertNumQueries(0):
-            self.assertEqual(self.workflow.get_state(-1), State.Unscheduled)
+            self.assertEqual(self.process.get_state(-1), State.Unscheduled)
 
-        prefetch_related_objects([self.workflow], "tasks")
+        prefetch_related_objects([self.process], "tasks")
 
         with self.assertNumQueries(0):
-            self.assertEqual(self.workflow.state, State.Running)
-            self.assertEqual(self.workflow.get_state(0), State.Running)
-            self.assertEqual(self.workflow.get_state(1), State.Unscheduled)
-            self.assertEqual(self.workflow.get_state(-1), State.Unscheduled)
+            self.assertEqual(self.process.state, State.Running)
+            self.assertEqual(self.process.get_state(0), State.Running)
+            self.assertEqual(self.process.get_state(1), State.Unscheduled)
+            self.assertEqual(self.process.get_state(-1), State.Unscheduled)
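
These assertions rely on the state lookup honouring Django's prefetch cache. A minimal sketch of the idea, assuming STATES_ORDERING lists states by precedence (e.g. Running before Unscheduled); illustrative only, not the actual implementation:

    def get_state(process, run):
        if run < 0:
            # Negative run numbers can never exist: skip the query entirely
            return State.Unscheduled
        # Iterating over .all() reuses the prefetch cache when tasks have been
        # prefetched, so no extra SQL query is issued in that case.
        states = {task.state for task in process.tasks.all() if task.run == run}
        if not states:
            return State.Unscheduled
        for state in STATES_ORDERING:
            if state in states:
                return state
        return State.Unscheduled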
diff --git a/arkindex/ponos/tests/test_tasks_attribution.py b/arkindex/ponos/tests/test_tasks_attribution.py
index 32ca37f672..0b9139b0f3 100644
--- a/arkindex/ponos/tests/test_tasks_attribution.py
+++ b/arkindex/ponos/tests/test_tasks_attribution.py
@@ -2,22 +2,27 @@ import uuid
 from datetime import timedelta
 from unittest.mock import patch
 
-from django.test import TestCase
 from django.utils import timezone
 
-from arkindex.ponos.models import ACTIVE_STATES, FINAL_STATES, Agent, Farm, State, Task, Workflow
+from arkindex.ponos.models import ACTIVE_STATES, FINAL_STATES, Agent, Farm, State, Task
 from arkindex.ponos.models import timezone as model_tz
+from arkindex.process.models import Process, ProcessMode, WorkerVersion, WorkerVersionState
+from arkindex.project.tests import FixtureTestCase
 
 
-class TasksAttributionTestCase(TestCase):
+class TasksAttributionTestCase(FixtureTestCase):
     """
-    Ponos server distribute tasks equally among agents.
+    The Ponos server distributes tasks equally among agents.
     """
 
     @classmethod
     def setUpTestData(cls):
-        cls.farm = Farm.objects.create(name="testfarm")
-        cls.workflow = Workflow.objects.create(farm=cls.farm)
+        super().setUpTestData()
+        cls.farm = Farm.objects.get(name='Wheat farm')
+        cls.process = Process.objects.create(creator=cls.superuser, mode=ProcessMode.Repository, farm=cls.farm)
+        # Disable worker versions and remove all task fixtures so we can fully control task assignment
+        WorkerVersion.objects.filter(state=WorkerVersionState.Available).update(state=WorkerVersionState.Error)
+        Task.objects.all().delete()
 
     def _run_tasks(self, tasks):
         """
@@ -53,7 +58,7 @@ class TasksAttributionTestCase(TestCase):
         params = {
             "run": 0,
             "depth": 0,
-            "workflow": self.workflow,
+            "process": self.process,
             "state": State.Pending,
         }
         params.update(kwargs)
@@ -307,7 +312,7 @@ class TasksAttributionTestCase(TestCase):
             Task(
                 depth=0,
                 run=0,
-                workflow=self.workflow,
+                process=self.process,
                 requires_gpu=True,
                 slug=f'task{i}',
                 state=state,
@@ -317,7 +322,7 @@ class TasksAttributionTestCase(TestCase):
         )
 
         # Create another task that needs a GPU and should be assigned to this agent
-        new_task = self.workflow.tasks.create(
+        new_task = self.process.tasks.create(
             run=0,
             depth=0,
             slug='new_task',
@@ -340,10 +345,10 @@ class TasksAttributionTestCase(TestCase):
         gpu = gpu_agent.gpus.create(id=uuid.uuid4(), index=0, ram_total=16e12, name='My cool GPU')
 
         for state in ACTIVE_STATES:
-            self.workflow.tasks.all().delete()
+            self.process.tasks.all().delete()
             with self.subTest(state=state):
                 # Create a task that is running on this agent's GPU
-                self.workflow.tasks.create(
+                self.process.tasks.create(
                     run=0,
                     depth=0,
                     slug='current_task',
@@ -354,7 +359,7 @@ class TasksAttributionTestCase(TestCase):
                 )
 
                 # Create another task that needs a GPU
-                new_task = self.workflow.tasks.create(
+                new_task = self.process.tasks.create(
                     run=0,
                     depth=0,
                     slug='new_task',
@@ -387,8 +392,12 @@ class TasksAttributionTestCase(TestCase):
             for i in range(1, 3)
         ]
 
-        corn_workflow = corn_farm.workflows.create()
-        tasks = self._add_pending_tasks(3, workflow=corn_workflow)
+        corn_process = corn_farm.processes.create(
+            mode=ProcessMode.Workers,
+            creator=self.superuser,
+            corpus=self.corpus,
+        )
+        tasks = self._add_pending_tasks(3, process=corn_process)
 
         self.assertEqual(Task.objects.count(), 6)
         self.assertEqual(
@@ -400,7 +409,7 @@ class TasksAttributionTestCase(TestCase):
         tasks = test_agent.next_tasks()
         self.assertEqual(len(tasks), 3)
         self.assertEqual(
-            set([task.workflow.farm_id for task in tasks]), set([self.farm.id])
+            {task.process.farm_id for task in tasks}, {self.farm.id}
         )
 
         corn_tasks_1 = corn_agent_1.next_tasks()
diff --git a/arkindex/ponos/tests/test_workflow.py b/arkindex/ponos/tests/test_workflow.py
deleted file mode 100644
index 2b51116f5f..0000000000
--- a/arkindex/ponos/tests/test_workflow.py
+++ /dev/null
@@ -1,135 +0,0 @@
-from unittest.mock import patch
-
-from django.test import TestCase
-from django.urls import reverse
-from django.utils import timezone
-from rest_framework import status
-
-from arkindex.ponos.models import Agent, Farm, State, Workflow
-
-
-class WorkflowTestCase(TestCase):
-    """
-    Create some workflows & tasks
-    """
-
-    @classmethod
-    def setUpTestData(cls):
-        cls.farm = Farm.objects.create(name="testfarm")
-        # Create a fake Agent instance
-        cls.agent = Agent.objects.create(
-            hostname="test_agent",
-            cpu_cores=2,
-            cpu_frequency=1e9,
-            public_key="",
-            farm=cls.farm,
-            ram_total=2e9,
-            last_ping=timezone.now(),
-        )
-
-    def test_workflow_running_override(self):
-        """
-        Test that a single running task in a workflow will override any other state
-        """
-        w = Workflow.objects.create(farm=self.farm)
-        t1 = w.tasks.create(image='hello-world', run=0, depth=0, slug='t1')
-        t2 = w.tasks.create(image='hello-world', run=0, depth=1, slug='t2')
-        t3 = w.tasks.create(image='hello-world', run=0, depth=2, slug='t3')
-        t2.parents.add(t1)
-        t3.parents.add(t2)
-
-        self.assertEqual(w.state, State.Unscheduled)
-
-        t1.state = State.Running
-        t1.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Error
-        t2.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Failed
-        t2.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Stopping
-        t2.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Stopped
-        t2.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Pending
-        t2.save()
-        self.assertEqual(w.state, State.Running)
-
-        t2.state = State.Completed
-        t3.state = State.Completed
-        t2.save()
-        t3.save()
-        self.assertEqual(w.state, State.Running)
-
-    @patch("arkindex.ponos.models.s3")
-    @patch("arkindex.ponos.models.Task.s3_logs_get_url")
-    def test_task_parents_update(self, s3_mock, s3_logs_mock):
-        w = Workflow.objects.create(farm=self.farm)
-
-        parent1 = w.tasks.create(slug="parent1", image='hello-world', run=0, depth=0)
-        parent2 = w.tasks.create(slug="parent2", image='hello-world', run=0, depth=0)
-        child = w.tasks.create(slug="test", image='hello-world', run=0, depth=1)
-        child.parents.set([parent1, parent2])
-
-        response = self.client.get(
-            reverse("api:agent-actions"),
-            HTTP_AUTHORIZATION="Bearer {}".format(self.agent.token.access_token),
-            data={"cpu_load": 0.99, "ram_load": 0.49},
-        )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        actions = response.json()["actions"]
-        self.assertEqual(len(actions), 1)
-        self.assertNotEqual(actions[0]["task_id"], str(child.id))
-
-        parent1.state = State.Completed
-        parent1.save()
-
-        parent2.agent = self.agent
-        parent2.save()
-
-        self.client.get(
-            reverse("api:agent-actions"),
-            HTTP_AUTHORIZATION="Bearer {}".format(self.agent.token.access_token),
-        )
-        # Completing 2nd parent should change test_task state to pending
-        response = self.client.patch(
-            reverse("api:task-details", kwargs={"pk": str(parent2.id)}),
-            HTTP_AUTHORIZATION="Bearer {}".format(self.agent.token.access_token),
-            data={"state": State.Completed.value},
-            content_type="application/json",
-        )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-
-        child.refresh_from_db()
-        self.assertEqual(child.state, State.Pending)
-        self.assertEqual(w.state, State.Pending)
-
-    def test_workflow_retry_updates_finished(self):
-        w = Workflow.objects.create(farm=self.farm)
-        w.tasks.create(image='hello-world', slug='task', state=State.Completed, run=0, depth=0)
-        w.finished = timezone.now()
-        w.save()
-
-        with self.assertNumQueries(7):
-            w.retry()
-        self.assertEqual(w.tasks.count(), 2)
-        self.assertIsNone(w.finished)
-
-    def test_workflow_stop_unscheduled_updates_finished(self):
-        w = Workflow.objects.create(farm=self.farm)
-        task = w.tasks.create(image='hello-world', slug='task', state=State.Unscheduled, run=0, depth=0)
-
-        # Stopping the workflow causes all tasks to immediately be Stopped
-        w.stop()
-        self.assertIsNotNone(w.finished)
-        task.refresh_from_db()
-        self.assertEqual(task.state, State.Stopped)
diff --git a/arkindex/ponos/utils.py b/arkindex/ponos/utils.py
index ace6ca3ead..e20ad3fc11 100644
--- a/arkindex/ponos/utils.py
+++ b/arkindex/ponos/utils.py
@@ -7,4 +7,4 @@ def is_admin_or_ponos_task(request):
 
 def get_process_from_task_auth(request):
     if isinstance(request.auth, Task):
-        return request.auth.workflow.process
+        return request.auth.process
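
With the direct Task → Process foreign key, task-authenticated requests now resolve their process in a single attribute access. A hedged usage sketch, with the view and its queryset purely illustrative:

    # Illustrative only: scoping a view's queryset to the authenticated task's process.
    def get_queryset(self):
        process = get_process_from_task_auth(self.request)
        if process is None:
            # Not task-authenticated: fall back to the regular queryset
            return super().get_queryset()
        return super().get_queryset().filter(process=process)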
diff --git a/arkindex/ponos/validators.py b/arkindex/ponos/validators.py
deleted file mode 100644
index d04e99551a..0000000000
--- a/arkindex/ponos/validators.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from django.core import validators
-
-
-class HiddenCallableValidatorMixin(object):
-    """
-    Implements a workaround for some issues with error messages in DRF
-    and with drf-spectacular OpenAPI schema generation when the `limit_value`
-    of any validator extending django.core.validators.BaseValidator is
-    a callable.  This rewrites `self.limit_value` as a property,
-    which calls the original limit value when it is callable while making
-    Django, DRF and Spectacular believe it isn't callable.
-
-    https://github.com/encode/django-rest-framework/discussions/8833
-    https://github.com/tfranzel/drf-spectacular/issues/913
-    """
-
-    def __init__(self, limit_value, message=None):
-        self._limit_value = limit_value
-        if message:
-            self.message = message
-
-    @property
-    def limit_value(self):
-        return self._limit_value() if callable(self._limit_value) else self._limit_value
-
-
-class MaxValueValidator(HiddenCallableValidatorMixin, validators.MaxValueValidator):
-    pass
diff --git a/arkindex/process/admin.py b/arkindex/process/admin.py
index 1bccf7d57a..41ad6f7bd8 100644
--- a/arkindex/process/admin.py
+++ b/arkindex/process/admin.py
@@ -37,19 +37,19 @@ class ProcessAdmin(admin.ModelAdmin):
     list_display = ('id', 'creator', 'corpus', 'state', 'mode')
     list_filter = [('mode', EnumFieldListFilter), ]
     fieldsets = (
-        (None, {'fields': ('id', 'name', 'creator', 'corpus', 'state', 'mode', 'workflow', 'activity_state', 'template')}),
+        (None, {'fields': ('id', 'name', 'creator', 'corpus', 'state', 'mode', 'activity_state', 'template')}),
         ('Elements filters', {
             'fields': ('element', 'element_type', 'folder_type', 'name_contains')
         }),
     )
-    readonly_fields = ('id', 'name', 'workflow', 'state', 'activity_state', 'template')
+    readonly_fields = ('id', 'name', 'state', 'activity_state', 'template')
     raw_id_fields = ('element', )
     inlines = [DataFileInline, WorkerRunInline, ElementInline]
 
     def get_queryset(self, *args, **kwargs):
         return super().get_queryset(*args, **kwargs) \
-            .prefetch_related('workflow__tasks', 'corpus') \
-            .annotate(last_run=Max('workflow__tasks__run'))
+            .prefetch_related('tasks', 'corpus') \
+            .annotate(last_run=Max('tasks__run'))
 
 
 class DataFileAdmin(admin.ModelAdmin):
diff --git a/arkindex/process/api.py b/arkindex/process/api.py
index ee0757628b..ccfd140726 100644
--- a/arkindex/process/api.py
+++ b/arkindex/process/api.py
@@ -1,5 +1,4 @@
 import logging
-from collections import defaultdict
 from datetime import timedelta
 from textwrap import dedent
 from uuid import UUID
@@ -7,8 +6,8 @@ from uuid import UUID
 from django.conf import settings
 from django.core.mail import send_mail
 from django.db import transaction
-from django.db.models import Avg, CharField, Count, DurationField, F, Max, Min, Q, Value
-from django.db.models.functions import Cast, Coalesce, Concat, Greatest, Now, NullIf
+from django.db.models import Avg, CharField, Count, DurationField, Exists, F, IntegerField, Max, Min, OuterRef, Q, Value
+from django.db.models.functions import Cast, Coalesce, Concat, Now, NullIf
 from django.db.models.query import Prefetch
 from django.shortcuts import get_object_or_404
 from django.template.loader import render_to_string
@@ -39,7 +38,7 @@ from rest_framework.views import APIView
 
 from arkindex.documents.models import Corpus, Element, Selection
 from arkindex.ponos.authentication import TaskAuthentication
-from arkindex.ponos.models import STATES_ORDERING, State, Task
+from arkindex.ponos.models import FINAL_STATES, STATES_ORDERING, State, Task
 from arkindex.ponos.permissions import IsTask
 from arkindex.process.models import (
     ActivityState,
@@ -57,7 +56,6 @@ from arkindex.process.models import (
     WorkerRun,
     WorkerType,
     WorkerVersion,
-    WorkerVersionGPUUsage,
     WorkerVersionState,
 )
 from arkindex.process.providers import GitProvider
@@ -70,6 +68,7 @@ from arkindex.process.serializers.imports import (
     ElementsWorkflowSerializer,
     FilesProcessSerializer,
     ImportTranskribusSerializer,
+    ProcessDetailsSerializer,
     ProcessElementLightSerializer,
     ProcessElementSerializer,
     ProcessListSerializer,
@@ -106,7 +105,6 @@ from arkindex.project.pagination import CustomCursorPagination
 from arkindex.project.permissions import IsVerified, IsVerifiedOrReadOnly
 from arkindex.project.tools import PercentileCont, RTrimChr
 from arkindex.project.triggers import process_delete
-from arkindex.training.models import ModelVersionState
 from arkindex.users.models import OAuthCredentials, Role, Scope
 from arkindex.users.utils import get_max_level
 
@@ -139,7 +137,7 @@ logger = logging.getLogger(__name__)
             OpenApiParameter(
                 'state',
                 enum=[state.value for state in State],
-                description='Filter processes by workflow state',
+                description='Filter processes by state',
                 required=False,
             ),
             OpenApiParameter(
@@ -153,10 +151,10 @@ logger = logging.getLogger(__name__)
                 required=False,
             ),
             OpenApiParameter(
-                'with_workflow',
+                'with_tasks',
                 type=bool,
-                description='Restrict to or exclude processes with workflows. '
-                            'This has no effect when filtering by the `template` or `local` modes.',
+                description='Restrict to or exclude processes with tasks. '
+                            'This has no effect when filtering by the `template` or `local` modes, as those cannot have tasks.',
                 required=False,
             )
         ]
@@ -195,11 +193,12 @@ class ProcessList(ProcessACLMixin, ListAPIView):
             # Exclude local processes by default
             filters &= ~Q(mode=ProcessMode.Local)
 
-        # When listing template or local processes, the workflow filter makes no sense
+        # When listing template or local processes, the task filter makes no sense
+        with_tasks = None
         if process_mode not in (ProcessMode.Template, ProcessMode.Local):
-            if 'with_workflow' in self.request.query_params:
-                with_workflow = self.request.query_params['with_workflow']
-                filters &= Q(workflow__isnull=bool(with_workflow and with_workflow.lower() in ('false', '0')))
+            if 'with_tasks' in self.request.query_params:
+                with_tasks = self.request.query_params['with_tasks'].lower() not in ('false', '0')
+                filters &= Q(tasks__isnull=not with_tasks)
 
         if 'created' in self.request.query_params:
             created = self.request.query_params['created']
@@ -214,64 +213,137 @@ class ProcessList(ProcessACLMixin, ListAPIView):
         if 'name' in self.request.query_params:
             filters &= Q(name__icontains=self.request.query_params['name'])
 
-        qs = self.readable_processes \
-            .filter(filters) \
-            .prefetch_related('workflow__tasks')
-        # Order workflow by completion date when available, or by date of last updated task in workflow
-        qs = qs.annotate(
-            last_date=Greatest(Max('workflow__tasks__updated'), 'updated'),
-            date_order=Coalesce('workflow__finished', 'last_date'),
-            last_run=Max('workflow__tasks__run')
+        qs = (
+            self.readable_processes
+            .filter(filters)
+            # Order processes by completion date when available, or start date, or last update
+            .annotate(date_order=Coalesce('finished', 'started', 'updated'))
         )
 
         state_value = self.request.query_params.get('state')
+
+        if with_tasks is False or process_mode in (ProcessMode.Local, ProcessMode.Template):
+            # If we are filtering to only include processes with no tasks, or process modes that never have tasks,
+            # then it is pointless to retrieve the tasks, and we know there is no last_run.
+            qs = qs.annotate(last_run=Value(None, output_field=IntegerField()))
+        else:
+            # Otherwise, fetch all tasks.
+            qs = qs.prefetch_related('tasks')
+            # Only define `last_run` if we filter by state, since we will need it to do the filtering.
+            # `last_run` is otherwise unnecessary, since it can be computed from all the prefetched tasks.
+            if state_value:
+                qs = qs.annotate(last_run=Max('tasks__run'))
+
         if state_value:
             try:
                 state = State(self.request.query_params['state'])
             except ValueError:
                 raise ValidationError({'state': [f"State '{state_value}' does not exist"]})
+
             # Filter out processes which have a task with an incompatible state on their last run
             excluding_states = STATES_ORDERING[:STATES_ORDERING.index(state)]
             excluded_processes = qs.filter(
-                Q(workflow__tasks__run=F('last_run')),
-                Q(workflow__tasks__state__in=excluding_states)
+                Q(tasks__run=F('last_run')),
+                Q(tasks__state__in=excluding_states)
             )
             # Keep non excluded processes matching the state on their last run tasks
-            state_query = Q(workflow__tasks__run=F('last_run')) & Q(workflow__tasks__state=state)
+            state_query = Q(tasks__run=F('last_run')) & Q(tasks__state=state)
             if state == State.Unscheduled:
-                # Handle a workflow with no task as unscheduled
-                state_query |= Q(workflow__tasks__isnull=True)
+                # Handle the absence of tasks as unscheduled
+                state_query |= Q(tasks__isnull=True)
             qs = qs.filter(state_query).exclude(id__in=excluded_processes.values('id')).distinct()
 
         return qs.order_by('-date_order')
 
 
+class ProcessQuerysetMixin(object):
+    """
+    Optimized queryset for Retrieve/Update/PartialUpdate/Destroy/RetryProcess
+    """
+
+    def get_queryset(self):
+        queryset = (
+            Process
+            .objects
+            # Needed for ACL checks
+            .select_related('corpus')
+            # Needed for Process.state
+            .prefetch_related('tasks')
+            .annotate(last_run=Max('tasks__run'))
+        )
+
+        # When not deleting a process, we will be returning the whole process again, so we need to fetch from more tables
+        if self.request.method.lower() != 'delete':
+            queryset = (
+                queryset
+                .select_related(
+                    'corpus',
+                    'farm',
+                    # When an element is set on the process, it is serialized with its image URL and type
+                    'element__image__server',
+                    'element__type',
+                    # Element and folder types are serialized as their slugs
+                    'element_type',
+                    'folder_type',
+                    # The revision is serialized with its commit URL, which also requires the repository
+                    'revision__repo',
+                )
+                # Files and tasks are also listed
+                .prefetch_related('files', 'tasks__parents')
+            )
+
+        return queryset
+
+
 @extend_schema(tags=['process'])
 @extend_schema_view(
-    get=extend_schema(description='Retrieve details of a process.\n\nRequires a **guest** access.'),
-    patch=extend_schema(description='Partially update a process.\n\nRequires an **admin** access.'),
-    put=extend_schema(description='Update dependencies of a process.\n\nRequires an **admin** access.'),
+    get=extend_schema(
+        operation_id='RetrieveProcess',
+        description=dedent("""
+            Retrieve details of a process.
+
+            Requires a **guest** access.
+        """),
+    ),
+    patch=extend_schema(
+        operation_id='PartialUpdateProcess',
+        description=dedent("""
+            Partially update a process.
+
+            Requires an **admin** access.
+        """),
+    ),
+    put=extend_schema(
+        operation_id='UpdateProcess',
+        description=dedent("""
+            Update a process.
+
+            Requires an **admin** access.
+        """),
+    ),
     delete=extend_schema(
-        description=(
-            'Delete a process.\n\n'
-            'Cannot be used on currently running processes. '
-            'Requires an **admin** access to the process.\n\n'
-            'Triggers an asynchronous deletion of the process if it has linked '
-            'activities and return a **HTTP_202_ACCEPTED**.'
-        ),
+        operation_id='DestroyProcess',
+        description=dedent("""
+            Delete a process.
+
+            Cannot be used on currently running processes. Requires an **admin** access to the process.
+
+            If the process has worker activities, an asynchronous deletion is triggered and **HTTP 202 Accepted** is returned.
+
+            Otherwise, the process is deleted immediately and **HTTP 204 No Content** is returned.
+        """),
         responses={
             202: None,
             204: None,
         },
     ),
 )
-class ProcessEdit(ProcessACLMixin, RetrieveUpdateDestroyAPIView):
+class ProcessDetails(ProcessACLMixin, ProcessQuerysetMixin, RetrieveUpdateDestroyAPIView):
+    """
+    Retrieve, update or delete a process
+    """
     permission_classes = (IsVerified, )
-    serializer_class = ProcessSerializer
-    queryset = Process.objects \
-        .select_related('corpus', 'workflow') \
-        .prefetch_related('workflow__tasks') \
-        .annotate(last_run=Max('workflow__tasks__run'))
+    serializer_class = ProcessDetailsSerializer
 
     def get_object(self):
         if not hasattr(self, '_process'):
@@ -299,7 +371,7 @@ class ProcessEdit(ProcessACLMixin, RetrieveUpdateDestroyAPIView):
             raise PermissionDenied(detail='You do not have a sufficient access level to this process.')
 
         if request.method == 'DELETE' and process.state == State.Running:
-            raise ValidationError({'__all__': ['Cannot delete a workflow while it is running']})
+            raise ValidationError({'__all__': ['Cannot delete a process while it is running']})
 
     def destroy(self, request, *args, **kwargs):
         # If the process has no activity, deletes it directly. Triggers an async deletion otherwise.
@@ -313,17 +385,13 @@ class ProcessEdit(ProcessACLMixin, RetrieveUpdateDestroyAPIView):
         return Response(status=status.HTTP_204_NO_CONTENT)
 
 
-class ProcessRetry(ProcessACLMixin, GenericAPIView):
+class ProcessRetry(ProcessACLMixin, ProcessQuerysetMixin, GenericAPIView):
     """
     Retry a process. Can only be used on processes with Error, Failed, Stopped or Completed states.\n\n
     Requires an **admin** access to the process.
     """
     permission_classes = (IsVerified, )
     serializer_class = ProcessSerializer
-    queryset = Process.objects \
-        .select_related('corpus', 'workflow') \
-        .prefetch_related('workflow__tasks') \
-        .annotate(last_run=Max('workflow__tasks__run'))
 
     def check_object_permissions(self, request, process):
         super().check_object_permissions(request, process)
@@ -336,13 +404,13 @@ class ProcessRetry(ProcessACLMixin, GenericAPIView):
 
         # process.state can cause new SQL queries to be run, so we access it just once
         state = process.state
-        # Allow 'retrying' a process that has no Ponos workflow (that has never been started)
-        if process.workflow is not None and state in (State.Unscheduled, State.Pending):
-            raise ValidationError({'__all__': ['This workflow is already pending']})
+        # Allow 'retrying' a process that has no Ponos tasks (that has never been started)
+        if len(process.tasks.all()) and state in (State.Unscheduled, State.Pending):
+            raise ValidationError({'__all__': ['This process is already pending']})
         elif state == State.Running:
-            raise ValidationError({'__all__': ['This workflow is already running']})
+            raise ValidationError({'__all__': ['This process is already running']})
         elif state == State.Stopping:
-            raise ValidationError({'__all__': ['This workflow is stopping']})
+            raise ValidationError({'__all__': ['This process is stopping']})
 
     @extend_schema(
         operation_id='RetryProcess',
@@ -409,7 +477,7 @@ class FilesProcess(CreateAPIView):
 )
 class CorpusWorkflow(SelectionMixin, CorpusACLMixin, CreateAPIView):
     """
-    Create a distributed workflow from elements of an Arkindex corpus.\n\n
+    Create a distributed process from elements of an Arkindex corpus.\n\n
     Requires an **admin** access to the corpus.
     """
     permission_classes = (IsVerified, )
@@ -463,89 +531,56 @@ class CorpusWorkflow(SelectionMixin, CorpusACLMixin, CreateAPIView):
         )
 
 
-class StartProcess(CorpusACLMixin, APIView):
+@extend_schema_view(
+    post=extend_schema(
+        operation_id='StartProcess',
+        tags=['process'],
+        responses=ProcessSerializer,
+    ),
+)
+class StartProcess(CorpusACLMixin, CreateAPIView):
     """
     Start a process, used to build a Workflow with Workers.\n\n
     Requires an **admin** access to the corpus of this process.
     """
     permission_classes = (IsVerified, )
-    # For OpenAPI type discovery
-    queryset = Process.objects.none()
-
-    @extend_schema(
-        operation_id='StartProcess',
-        tags=['process'],
-        request=StartProcessSerializer,
-        responses=ProcessSerializer
+    serializer_class = StartProcessSerializer
+    queryset = (
+        Process
+        .objects
+        .select_related('corpus')
+        .filter(corpus_id__isnull=False)
+        .prefetch_related(Prefetch('worker_runs', queryset=WorkerRun.objects.select_related('version', 'model_version')))
+        # Use Exists() for has_tasks rather than an __isnull lookup, since we are not joining on tasks and do not need to fetch them
+        .annotate(has_tasks=Exists(Task.objects.filter(process=OuterRef('pk'))))
     )
-    def post(self, request, pk=None, **kwargs):
 
-        qs = Process.objects \
-            .select_related('corpus') \
-            .filter(corpus_id__isnull=False) \
-            .prefetch_related(Prefetch('worker_runs', queryset=WorkerRun.objects.select_related('version', 'model_version')))
-
-        process = get_object_or_404(qs, pk=self.kwargs['pk'])
+    def check_object_permissions(self, request, process):
+        super().check_object_permissions(request, process)
 
         if not self.has_admin_access(process.corpus):
             raise PermissionDenied(detail='You do not have an admin access to the corpus of this process.')
 
-        if process.mode != ProcessMode.Workers or process.workflow is not None:
+        if process.mode != ProcessMode.Workers or process.has_tasks:
             raise ValidationError(
                 {'__all__': ['Only a Process with Workers mode and not already launched can be started later on']})
 
-        serializer = StartProcessSerializer(data=request.data)
+    def create(self, request, *args, **kwargs):
+        serializer = self.get_serializer(data=request.data, instance=self.get_object())
         serializer.is_valid(raise_exception=True)
 
-        data = serializer.validated_data
-
-        errors = defaultdict(list)
-        # Use process.worker_runs.all() to access the (prefetched) worker_runs to avoid new SQL queries
-        # The related version have also been prefetched
-        if len(list(process.worker_runs.all())) > 0:
-            if data.get('use_gpu') and (not any(item.version.gpu_usage != WorkerVersionGPUUsage.Disabled for item in process.worker_runs.all())):
-                errors['use_gpu'] = 'The process is configured to use GPU, but does not include any workers that support GPU usage.'
-
-            # Check if a worker run has no model version but version.model_usage = True
-            missing_model_versions = []
-            unavailable_versions = []
-            has_unavailable_model_versions = False
-
-            for worker_run in process.worker_runs.all():
-                if worker_run.version.model_usage and worker_run.model_version_id is None:
-                    missing_model_versions.append(worker_run.version.worker.name)
-                if worker_run.version.state != WorkerVersionState.Available or worker_run.version.docker_image_id is None:
-                    unavailable_versions.append(worker_run.version.id)
-                if worker_run.model_version_id and worker_run.model_version.state != ModelVersionState.Available:
-                    has_unavailable_model_versions = True
-
-            if len(missing_model_versions) > 0:
-                errors['model_version'].append(f"The following workers require a model version and none was set: {missing_model_versions}")
-            if has_unavailable_model_versions:
-                errors['model_version'].append('This process contains one or more unavailable model versions and cannot be started.')
-            if len(unavailable_versions) > 0:
-                errors['version'] = 'This process contains one or more unavailable worker versions and cannot be started.'
-
-        else:
-            if data.get('worker_activity'):
-                errors['worker_activity'] = 'The process must have workers attached to handle their activity.'
-            if data.get('use_cache'):
-                errors['use_cache'] = 'The process must have workers attached to use cached results.'
-            if data.get('use_gpu'):
-                errors['use_gpu'] = 'The process must have workers attached to use GPUs.'
-            if not data.get('thumbnails'):
-                errors['__all__'] = ['The process must either use thumbnail generation or have worker runs.']
-        if errors:
-            raise ValidationError(errors)
+        self.perform_create(serializer)
 
-        process.start(**serializer.validated_data)
+        headers = self.get_success_headers(serializer.data)
+        response_serializer = ProcessSerializer(
+            serializer.instance,
+            context={'request': request},
+        )
 
         return Response(
-            status=status.HTTP_200_OK,
-            data=ProcessSerializer(
-                process,
-                context={'request': request}
-            ).data,
+            response_serializer.data,
+            status=status.HTTP_201_CREATED,
+            headers=headers,
         )
 
 
@@ -1202,7 +1237,11 @@ class WorkerRunList(WorkerACLMixin, ListCreateAPIView):
     @cached_property
     def process(self):
         process = get_object_or_404(
-            Process.objects.filter(corpus_id__isnull=False).select_related('corpus'),
+            Process
+            .objects
+            .filter(corpus_id__isnull=False)
+            .select_related('corpus')
+            .annotate(has_tasks=Exists(Task.objects.filter(process=OuterRef('pk')))),
             pk=self.kwargs['pk']
         )
 
@@ -1256,11 +1295,15 @@ class WorkerRunList(WorkerACLMixin, ListCreateAPIView):
         if self.process.mode != ProcessMode.Workers:
             raise ValidationError({'process': ['Import mode must be Workers']})
 
-        if self.process.workflow_id is not None:
+        if self.process.has_tasks:
             raise ValidationError({'__all__': ["Cannot create a WorkerRun on a Process that has already started"]})
 
         serializer.save(process=self.process, configuration=configuration)
 
+        # Since we only allow creating a WorkerRun on a process without any task, we know the last run is None,
+        # without having to make any extra SQL queries to serialize it.
+        self.process.last_run = None
+
 
 @extend_schema(tags=['process'])
 @extend_schema_view(
@@ -1292,17 +1335,38 @@ class WorkerRunDetails(CorpusACLMixin, RetrieveUpdateDestroyAPIView):
     def get_object(self):
         if not hasattr(self, '_worker_run'):
             self._worker_run = super().get_object()
+
+            # To avoid an unnecessary extra query for the process' last run, we set it here.
+            # We will only serialize the process with its state when the query is successful,
+            # and we only allow editing WorkerRuns on processes that have no tasks at all,
+            # therefore the last run is always None.
+            self._worker_run.process.last_run = None
+
         return self._worker_run
 
     def get_queryset(self):
-        # Use default DB to avoid a race condition checking process workflow
-        return WorkerRun.objects \
+        queryset = WorkerRun.objects \
             .filter(
                 ~Q(process__corpus_id=None)
                 | Q(process__creator_id=self.user.id, process__mode=ProcessMode.Local)
             ) \
-            .using('default') \
-            .select_related('version__worker__type', 'configuration', 'process__workflow', 'process__corpus', 'version__revision__repo')
+            .select_related('process__corpus') \
+            .annotate(process_has_tasks=Exists(Task.objects.filter(process=OuterRef('process_id'))))
+
+        if self.request.method.lower() != 'delete':
+            # Selecting those extra tables is only necessary when the request will return the serialized WorkerRun
+            queryset = (
+                queryset
+                .select_related(
+                    'version__worker__type',
+                    'version__revision__repo',
+                    'configuration',
+                    'model_version__model',
+                )
+                .prefetch_related('version__revision__refs')
+            )
+
+        return queryset
 
     def get_serializer_context(self):
         context = super().get_serializer_context()
@@ -1316,7 +1380,7 @@ class WorkerRunDetails(CorpusACLMixin, RetrieveUpdateDestroyAPIView):
 
         # Updating a worker run is not possible once the process is started
         if request.method not in permissions.SAFE_METHODS:
-            if worker_run.process.workflow_id is not None:
+            if worker_run.process_has_tasks:
                 raise ValidationError({'__all__': ["Cannot update a WorkerRun on a Process that has already started"]})
             if worker_run.process.mode == ProcessMode.Local:
                 raise ValidationError({'__all__': ['Cannot update a WorkerRun on a local process']})
@@ -1847,7 +1911,7 @@ class ClearProcess(ProcessACLMixin, DestroyAPIView):
     Remove all templates and/or workers from a (not started) process
     """
     permission_classes = (IsVerified, )
-    queryset = Process.objects.all()
+    queryset = Process.objects.all().annotate(has_tasks=Exists(Task.objects.filter(process=OuterRef('pk'))))
 
     def check_object_permissions(self, request, process):
         super().check_object_permissions(request, process)
@@ -1859,7 +1923,7 @@ class ClearProcess(ProcessACLMixin, DestroyAPIView):
             raise NotFound
         if access_level < required_access:
             raise PermissionDenied(detail='You do not have a sufficient access level to this process.')
-        if process.workflow_id is not None:
+        if process.has_tasks:
             raise ValidationError({'__all__': ['A process can only be cleared before getting started.']})
 
     def destroy(self, request, *args, **kwargs):
@@ -1964,6 +2028,7 @@ class SelectProcessFailures(ProcessACLMixin, CreateAPIView):
         Select elements of a process that have any worker activity failure.
         The process must be finished and of type "workers".
         """
+        # TODO: Move this to the `queryset` class attribute and make this endpoint more in line with best practices
         qs = (
             Process.objects
             .select_related('corpus')
@@ -1971,6 +2036,12 @@ class SelectProcessFailures(ProcessACLMixin, CreateAPIView):
                 mode=ProcessMode.Workers,
                 corpus_id__isnull=False,
             )
+            .annotate(has_non_final_tasks=Exists(
+                Task
+                .objects
+                .filter(process=OuterRef('pk'))
+                .exclude(state__in=FINAL_STATES)
+            ))
         )
         process = get_object_or_404(qs, pk=self.kwargs['pk'])
         access_level = self.process_access_level(process)
@@ -1980,7 +2051,7 @@ class SelectProcessFailures(ProcessACLMixin, CreateAPIView):
             raise ValidationError({
                 '__all__': ['The process must use worker activities to select elements with failures.']
             })
-        if process.workflow is None or not process.workflow.is_final():
+        if process.has_non_final_tasks:
             raise ValidationError({
                 '__all__': ['The process must be finished to select elements with failures.']
             })
diff --git a/arkindex/process/migrations/0004_process_farm_started_finished.py b/arkindex/process/migrations/0004_process_farm_started_finished.py
new file mode 100644
index 0000000000..d90caa49ee
--- /dev/null
+++ b/arkindex/process/migrations/0004_process_farm_started_finished.py
@@ -0,0 +1,51 @@
+# Generated by Django 4.1.7 on 2023-06-01 09:40
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('ponos', '0001_initial'),
+        ('process', '0003_initial'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='process',
+            name='farm',
+            field=models.ForeignKey(
+                on_delete=models.DO_NOTHING,
+                related_name='processes',
+                to='ponos.Farm',
+                blank=True,
+                null=True,
+            ),
+        ),
+        migrations.AddField(
+            model_name='process',
+            name='finished',
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='process',
+            name='started',
+            field=models.DateTimeField(blank=True, null=True),
+        ),
+        migrations.AddConstraint(
+            model_name='process',
+            constraint=models.CheckConstraint(
+                check=models.Q(started=None) | models.Q(started__gte=models.F('created')),
+                name='process_started_after_created',
+                violation_error_message='The process start date must not be earlier than the process creation date.',
+            ),
+        ),
+        migrations.AddConstraint(
+            model_name='process',
+            constraint=models.CheckConstraint(
+                check=models.Q(finished=None) | models.Q(finished__gte=models.F('started')),
+                name='process_finished_after_started',
+                violation_error_message='The process finish date must not be earlier than the process start date.',
+            ),
+        ),
+    ]
diff --git a/arkindex/process/migrations/0005_migrate_workflows.py b/arkindex/process/migrations/0005_migrate_workflows.py
new file mode 100644
index 0000000000..85d888a1ab
--- /dev/null
+++ b/arkindex/process/migrations/0005_migrate_workflows.py
@@ -0,0 +1,117 @@
+# Generated by Django 4.1.7 on 2023-06-01 09:44
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('ponos', '0002_task_process'),
+        ('process', '0004_process_farm_started_finished'),
+        ('training', '0002_initial'),
+    ]
+
+    operations = [
+        migrations.RunSQL(
+            [
+                # This query cannot be done with QuerySet.update, because Django does not allow joining during an update.
+                # This is why we use RunSQL here and not RunPython.
+                """
+                UPDATE process_process process
+                SET started = workflow.created, finished = workflow.finished, farm_id = workflow.farm_id
+                FROM ponos_workflow workflow
+                WHERE process.workflow_id = workflow.id
+                """,
+                """
+                UPDATE ponos_task task
+                SET process_id = process.id
+                FROM process_process process
+                WHERE process.workflow_id = task.workflow_id
+                """,
+
+                # We might have some tasks in workflows that are not linked to any process.
+                # We have no choice but to delete those tasks, so we also set foreign keys to NULL or delete related rows.
+
+                # Unlink the Docker image artifacts from worker versions for the tasks we are about to delete
+                """
+                UPDATE process_workerversion version
+                SET state = 'error', docker_image_id = NULL
+                FROM ponos_artifact artifact, ponos_task task
+                WHERE version.docker_image_id = artifact.id
+                AND artifact.task_id = task.id
+                AND task.process_id IS NULL
+                """,
+                # Unlink the Docker image artifacts from tasks for the tasks we are about to delete
+                """
+                UPDATE ponos_task task
+                SET image_artifact_id = NULL
+                FROM ponos_artifact artifact, ponos_task task2
+                WHERE task.image_artifact_id = artifact.id
+                AND artifact.task_id = task2.id
+                AND task2.process_id IS NULL
+                """,
+                # Remove the artifacts of the tasks we are about to delete
+                """
+                DELETE FROM ponos_artifact artifact
+                USING ponos_task task
+                WHERE artifact.task_id = task.id
+                AND task.process_id IS NULL
+                """,
+                # Unlink the tasks we are about to delete from datasets
+                """
+                UPDATE training_dataset dataset
+                SET task_id = NULL
+                FROM ponos_task task
+                WHERE dataset.task_id = task.id
+                AND task.process_id IS NULL
+                """,
+                # Remove task parents
+                """
+                DELETE FROM ponos_task_parents parents
+                USING ponos_task task
+                WHERE parents.from_task_id = task.id
+                AND task.process_id IS NULL
+                """,
+                """
+                DELETE FROM ponos_task_parents parents
+                USING ponos_task task
+                WHERE parents.to_task_id = task.id
+                AND task.process_id IS NULL
+                """,
+                # Remove the tasks
+                """
+                DELETE FROM ponos_task
+                WHERE process_id IS NULL
+                """,
+            ],
+            reverse_sql=[
+                # When reverting, we need to recreate workflows for any process that has tasks, then reassign the tasks to the workflows.
+                # For convenience, we will use the process IDs as the new workflow IDs, since nothing forbids that.
+                """
+                INSERT INTO ponos_workflow (id, farm_id, created, updated, finished)
+                SELECT id, farm_id, started, updated, finished
+                FROM process_process process
+                WHERE EXISTS (
+                    SELECT 1
+                    FROM ponos_task
+                    WHERE process_id = process.id
+                )
+                """,
+                """
+                UPDATE process_process process
+                SET workflow_id = id
+                WHERE EXISTS (
+                    SELECT 1
+                    FROM ponos_task
+                    WHERE process_id = process.id
+                )
+                """,
+                """
+                UPDATE ponos_task
+                SET workflow_id = process_id
+                """,
+            ],
+            # manage.py squashmigrations is allowed to remove this data migration
+            elidable=True,
+        ),
+    ]
diff --git a/arkindex/process/migrations/0006_remove_process_workflow.py b/arkindex/process/migrations/0006_remove_process_workflow.py
new file mode 100644
index 0000000000..ddef48dc95
--- /dev/null
+++ b/arkindex/process/migrations/0006_remove_process_workflow.py
@@ -0,0 +1,21 @@
+# Generated by Django 4.1.7 on 2023-06-01 09:44
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('process', '0005_migrate_workflows'),
+    ]
+
+    operations = [
+        migrations.RemoveConstraint(
+            model_name='process',
+            name='local_process_no_workflow',
+        ),
+        migrations.RemoveField(
+            model_name='process',
+            name='workflow',
+        ),
+    ]
diff --git a/arkindex/process/models.py b/arkindex/process/models.py
index 6489535faa..5bd377335a 100644
--- a/arkindex/process/models.py
+++ b/arkindex/process/models.py
@@ -7,8 +7,9 @@ from django.conf import settings
 from django.contrib.contenttypes.fields import GenericRelation
 from django.core.validators import MinLengthValidator
 from django.db import models, transaction
-from django.db.models import Q
+from django.db.models import F, Q
 from django.urls import reverse
+from django.utils import timezone
 from django.utils.functional import cached_property
 from enumfields import Enum, EnumField
 from rest_framework.exceptions import ValidationError
@@ -16,7 +17,7 @@ from rest_framework.exceptions import ValidationError
 import pgtrigger
 from arkindex.documents.models import Element
 from arkindex.images.models import ImageServer
-from arkindex.ponos.models import Artifact, State, Task, Workflow, task_token_default
+from arkindex.ponos.models import FINAL_STATES, STATES_ORDERING, Artifact, State, Task, task_token_default
 from arkindex.process.managers import ActivityManager, CorpusWorkerVersionManager, WorkerVersionManager
 from arkindex.process.providers import get_provider
 from arkindex.process.utils import get_default_farm_id
@@ -66,9 +67,6 @@ class ProcessMode(Enum):
 
 
 class Process(IndexableModel):
-    """
-    A single import workflow
-    """
 
     name = models.CharField(null=True, blank=True, max_length=100)
     creator = models.ForeignKey('users.User', on_delete=models.CASCADE, related_name='processes')
@@ -77,11 +75,21 @@ class Process(IndexableModel):
     files = models.ManyToManyField('process.DataFile', related_name='processes')
     revision = models.ForeignKey(
         'process.Revision', related_name='processes', on_delete=models.CASCADE, blank=True, null=True)
-    workflow = models.OneToOneField('ponos.Workflow', on_delete=models.SET_NULL, null=True, blank=True)
     versions = models.ManyToManyField('process.WorkerVersion', through='process.WorkerRun', related_name='processes')
     activity_state = EnumField(ActivityState, max_length=32, default=ActivityState.Disabled)
 
-    # Used to define the root element on which the workflow will run
+    started = models.DateTimeField(blank=True, null=True)
+    finished = models.DateTimeField(blank=True, null=True)
+
+    farm = models.ForeignKey(
+        to="ponos.Farm",
+        related_name="processes",
+        on_delete=models.DO_NOTHING,
+        blank=True,
+        null=True,
+    )
+
+    # Used to define the root element on which the process will run
     element = models.ForeignKey('documents.Element', on_delete=models.SET_NULL, null=True, blank=True)
 
     # Used to define the element type during folder elements creation for Images, PDF, IIIF and S3 imports
@@ -125,7 +133,7 @@ class Process(IndexableModel):
     # Allows to use a GPU in Workers processes
     use_gpu = models.BooleanField(default=False, blank=True)
 
-    # Refers a workflow template
+    # Refers to a process template
     template = models.ForeignKey(
         'process.Process',
         on_delete=models.SET_NULL,
@@ -179,12 +187,6 @@ class Process(IndexableModel):
         ordering = ['corpus', '-created']
         verbose_name_plural = 'processes'
         constraints = [
-            models.CheckConstraint(
-                # Allow workflow to be both None and not None on any mode, except local
-                check=~Q(mode=ProcessMode.Local) | Q(workflow=None),
-                name='local_process_no_workflow',
-                violation_error_message='Local processes cannot be started.',
-            ),
             models.CheckConstraint(
                 # Either the process mode is local or repository and the process does not have a corpus,
                 # or it has another mode and has a corpus set.
@@ -198,20 +200,120 @@ class Process(IndexableModel):
                 name='unique_local_process',
                 violation_error_message='Only one local process is allowed per user.',
             ),
+            models.CheckConstraint(
+                check=Q(started=None) | Q(started__gte=F('created')),
+                name='process_started_after_created',
+                violation_error_message='The process start date must not be earlier than the process creation date.',
+            ),
+            models.CheckConstraint(
+                # This also implies that `started` must not be None if `finished` is set,
+                # since finished >= NULL would be NULL and not TRUE.
+                check=Q(finished=None) | Q(finished__gte=F('started')),
+                name='process_finished_after_started',
+                violation_error_message='The process finish date must not be earlier than the process start date.',
+            ),
         ]
 
     @property
-    def state(self):
-        if not self.workflow:
-            return State.Unscheduled
-        # This allows annotating a Process queryset with "last_run" and preventing duplicate SQL queries
+    def state(self) -> State:
+        """
+        Deduce the process state from the tasks of its latest run.
+        The state is the first state, in the order defined by STATES_ORDERING, that any task of this run has.
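+
+        Illustrative sketch (assuming STATES_ORDERING ranks Running before Completed):
+        a run with one Running task and two Completed tasks is reported as Running.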
+        """
+        return self.get_state(self.get_last_run())
+
+    @property
+    def is_final(self) -> bool:
+        return self.state in FINAL_STATES
+
+    @property
+    def has_prefetched_tasks(self):
+        """
+        Returns whether all tasks of this process have been prefetched with .prefetch_related()
+        or prefetch_related_objects().
+        Prefetched tasks are only accessible through `self.tasks.all()`; any other method will cause another SQL query.
+        Therefore, when this property is True, only .all() should be used, filtering or aggregating in Python rather than with `.exists`, `.filter` or `.aggregate`.
+
+        See https://stackoverflow.com/a/19651840/5990435
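+
+        Illustrative usage sketch (variable names are examples only):
+
+            process = Process.objects.prefetch_related('tasks').get(pk=some_id)
+            assert process.has_prefetched_tasks
+            # Python-side filtering, without any extra SQL query
+            errors = [t for t in process.tasks.all() if t.state == State.Error]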
+        """
+        return (
+            hasattr(self, "_prefetched_objects_cache")
+            and self.tasks.field.remote_field.get_cache_name()
+            in self._prefetched_objects_cache
+        )
+
+    @property
+    def expiry(self):
+        """
+        A process' expiry date. This is the latest expiry date of its tasks.
+        No action is taken when a process is expired; only the `cleanup` command uses it.
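+
+        Illustrative sketch: with three tasks expiring on days 1, 3 and 2, the process expires on day 3.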
+        """
+        if self.has_prefetched_tasks:
+            return max((t.expiry for t in self.tasks.all()), default=None)
+        else:
+            return self.tasks.aggregate(models.Max("expiry"))["expiry__max"]
+
+    def get_last_run(self) -> int:
+        """
+        Get the last run number. If the `last_run` attribute is defined on this process,
+        possibly from a `.annotate(last_run=Max('tasks__run'))` annotation in a Django QuerySet,
+        this method will return the attribute's value instead of making another SQL query.
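+
+        Illustrative usage sketch (variable names are examples only):
+
+            process = Process.objects.annotate(last_run=Max('tasks__run')).get(pk=some_id)
+            process.get_last_run()  # Uses the annotation, no extra SQL query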
+        """
+        if not hasattr(self, "last_run"):
+            # This prevents performing another SQL request when tasks have already been prefetched.
+            # See https://stackoverflow.com/a/19651840/5990435
+            if self.has_prefetched_tasks:
+                self.last_run = max((t.run for t in self.tasks.all()), default=None)
+            else:
+                self.last_run = self.tasks.all().aggregate(models.Max("run"))["run__max"]
+
+        if self.last_run is None:
+            return -1
+
+        return self.last_run
+
+    def refresh_from_db(self, *args, **kwargs) -> None:
+        # Remove the cached last_run if it was set, to ensure the process is properly refreshed
         if hasattr(self, 'last_run'):
-            # last_run may be None when there is a workflow without any tasks
-            if self.last_run is None:
-                return State.Unscheduled
-            return self.workflow.get_state(self.last_run)
+            del self.last_run
+        super().refresh_from_db(*args, **kwargs)
+
+    def get_state(self, run) -> State:
+        """
+        A separate method to get a process' state on a given run.
+
+        Most users will only use the Process.state property to get the state for a process' last run.
+        However, when trying to get a state for many processes at once, using ``.annotate(last_run=Max('tasks__run'))``
+        and passing the annotation to this method will prevent many useless SQL requests.
+
+        Further performance improvements can be achieved with ``prefetch_related('tasks')``.
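+
+        Illustrative sketch for computing the states of many processes at once
+        (variable names are examples only):
+
+            processes = Process.objects.annotate(last_run=Max('tasks__run')).prefetch_related('tasks')
+            states = {p.id: p.get_state(p.get_last_run()) for p in processes}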
+        """
+        # Negative run numbers never have tasks
+        if run < 0:
+            return State.Unscheduled
+
+        # This prevents performing another SQL request when tasks have already been prefetched.
+        # See https://stackoverflow.com/a/19651840/5990435
+        if self.has_prefetched_tasks:
+            task_states = set(t.state for t in self.tasks.all() if t.run == run)
         else:
-            return self.workflow.state
+            task_states = set(
+                self.tasks.filter(run=run).values_list("state", flat=True)
+            )
+
+        # This run has no tasks
+        if not task_states:
+            return State.Unscheduled
+
+        # All tasks have the same state
+        if len(task_states) == 1:
+            return task_states.pop()
+
+        for state in STATES_ORDERING:
+            if state in task_states:
+                return state
+
+        raise NotImplementedError("Something went wrong")
 
     def _get_filters(self):
         filters = {
@@ -301,7 +403,6 @@ class Process(IndexableModel):
         self,
         command,
         slug,
-        workflow,
         env,
         depth=0,
         run=0,
@@ -327,11 +428,11 @@ class Process(IndexableModel):
         env['ARKINDEX_TASK_TOKEN'] = token
 
         task = Task(
+            process=self,
             command=command,
             slug=slug,
             tags=tags,
             depth=depth,
-            workflow=workflow,
             env=env,
             token=token,
             run=run,
@@ -349,21 +450,18 @@ class Process(IndexableModel):
 
     @transaction.atomic
     def build_workflow(self, farm=None, chunks=None, thumbnails=False, corpus_id=None, **kwargs):
-        '''
-        Create a ponos workflow with tasks according to configuration
-        '''
+        """
+        Create Ponos tasks according to configuration
+        """
         ml_workflow_chunks = 1
         import_task_name = 'import'
         # Use the default Ponos farm if no farm is specified
-        farm_id = farm and farm.id or get_default_farm_id()
+        self.farm_id = farm.id if farm is not None else get_default_farm_id()
         env = settings.PONOS_DEFAULT_ENV.copy()
         env['ARKINDEX_PROCESS_ID'] = str(self.pk)
         if self.corpus_id:
             env['ARKINDEX_CORPUS_ID'] = str(self.corpus_id)
 
-        workflow = Workflow.objects.create(farm_id=farm_id)
-        workflow.last_run = 0
-
         if self.mode == ProcessMode.Repository:
             assert self.revision is not None, \
                 'A revision is required to create an import workflow from GitLab repository'
@@ -375,7 +473,6 @@ class Process(IndexableModel):
                     + (f' --corpus-id {corpus_id}' if corpus_id else '')
                 ),
                 slug=import_task_name,
-                workflow=workflow,
                 env=env
             )
             import_task.save()
@@ -387,7 +484,6 @@ class Process(IndexableModel):
             import_task = self.build_task(
                 command=f'python -m arkindex_tasks.import_iiif.process {self.id}',
                 slug=import_task_name,
-                workflow=workflow,
                 env=env
             )
             import_task.save()
@@ -402,7 +498,6 @@ class Process(IndexableModel):
             transkribus_task = self.build_task(
                 command=f'python -m arkindex_tasks.export_transkribus {self.collection_id}',
                 slug='export_transkribus',
-                workflow=workflow,
                 env=env
             )
             transkribus_task.save()
@@ -412,7 +507,6 @@ class Process(IndexableModel):
                         f'--corpus {self.corpus.id}',
                 slug=import_task_name,
                 depth=1,
-                workflow=workflow,
                 parents=[transkribus_task],
                 env=env
             )
@@ -425,7 +519,6 @@ class Process(IndexableModel):
             import_task = self.build_task(
                 command=f'python -m arkindex_tasks.import_files {self.id}',
                 slug=import_task_name,
-                workflow=workflow,
                 env=env
             )
             import_task.save()
@@ -460,7 +553,6 @@ class Process(IndexableModel):
             import_task = self.build_task(
                 command=command,
                 slug=import_task_name,
-                workflow=workflow,
                 env=env
             )
             import_task.save()
@@ -491,7 +583,6 @@ class Process(IndexableModel):
                 env=env,
                 requires_gpu=self.use_gpu,
                 shm_size=worker_version.docker_shm_size,
-                workflow=workflow,
             )
             training_task.save()
 
@@ -511,7 +602,6 @@ class Process(IndexableModel):
             import_task = self.build_task(
                 command=' '.join(args),
                 slug=import_task_name,
-                workflow=workflow,
                 env=env
             )
             import_task.save()
@@ -564,7 +654,6 @@ class Process(IndexableModel):
                         command=f'python3 -m arkindex_tasks.generate_thumbnails {elements_path}',
                         slug=f'thumbnails{task_suffix}',
                         depth=1,
-                        workflow=workflow,
                         env=env,
                     )
                     tasks_to_create.append(thumbnails_task)
@@ -575,15 +664,13 @@ class Process(IndexableModel):
                     task_name = worker_run.version.slug
                     # The suffix is handled by WorkerRun.build_task
                     task, parent_slugs = worker_run.build_task(
-                        workflow,
+                        self,
                         task_name,
                         env,
                         import_task_name,
                         elements_path,
                         chunk=chunk,
                         workflow_runs=worker_runs,
-                        # TODO: Let WorkerRun.build_task use Process.use_gpu once workflows are removed
-                        use_gpu=self.use_gpu,
                     )
                     tasks_to_create.append(task)
                     parents[task.slug] = parent_slugs
@@ -608,8 +695,6 @@ class Process(IndexableModel):
                 for parent_slug in parent_slugs
             )
 
-        return workflow
-
     def start(self, use_cache=False, worker_activity=False, use_gpu=False, **kwargs):
         if self.mode != ProcessMode.Workers:
             assert not use_cache and not worker_activity, 'Only worker processes can be started with cache or worker activities'
@@ -621,7 +706,9 @@ class Process(IndexableModel):
         if use_gpu:
             self.use_gpu = True
 
-        self.workflow = self.build_workflow(**kwargs)
+        self.build_workflow(**kwargs)
+
+        self.started = timezone.now()
         self.save()
 
         # Asynchronously initialize worker activities if required
@@ -644,23 +731,99 @@ class Process(IndexableModel):
         if worker_version_id:
             CorpusWorkerVersion.objects.get_or_create(corpus_id=self.corpus_id, worker_version_id=worker_version_id)
 
+        # Save an SQL query when re-serializing this process: we know that the last run of this process is run 0 since we just started it.
+        self.last_run = 0
+
     def retry(self):
+        """
+        Create new Task instances with a new run number and reset the completion date.
+
+        :raises AssertionError: If the process has already started and is not in a final state.
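+
+        Illustrative sketch: if the latest run is 0, every task of run 0 is cloned
+        into a new run 1, and the parent links are remapped onto the new tasks by slug.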
+        """
+        # TODO: Maybe we should regenerate the tasks with build_workflow() instead of just cloning them like this.
+        last_run = self.get_last_run()
+        assert last_run < 0 or self.is_final, 'Cannot retry a process that is not in a final state.'
+
+        # Retrying a process that never started is just the same as starting
+        if last_run < 0:
+            self.start()
+            return
+
         if self.mode == ProcessMode.Repository and self.revision is not None and not self.revision.repo.enabled:
             raise ValidationError('Git repository does not have any valid credentials')
 
-        if self.workflow:
-            self.workflow.retry()
+        new_tasks = {}
+        task_parents = {}
+        for task in self.tasks.all():
+            new_task = Task(
+                run=last_run + 1,
+                depth=task.depth,
+                slug=task.slug,
+                priority=task.priority,
+                tags=task.tags,
+                image=task.image,
+                shm_size=task.shm_size,
+                command=task.command,
+                env=task.env,
+                has_docker_socket=task.has_docker_socket,
+                image_artifact=task.image_artifact,
+                agent_id=task.agent_id,
+                requires_gpu=task.requires_gpu,
+                gpu_id=task.gpu_id,
+                process_id=task.process_id,
+                container=task.container,
+                extra_files=task.extra_files
+            )
 
-            # If worker activities are enabled for this process, no matter their previous state,
-            # re-initialize the activities just in case any were deleted or stolen by other processes
-            # before we retried this one.
-            if self.activity_state != ActivityState.Disabled:
-                from arkindex.project.triggers import initialize_activity
-                initialize_activity(self)
-                self.activity_state = ActivityState.Pending
-                self.save()
-        else:
-            self.start()
+            # Set the task token on the new task
+            new_task.env['ARKINDEX_TASK_TOKEN'] = new_task.token
+
+            # TODO: Use a prefetch and task.parents.all() instead. This line causes one SQL query per task.
+            task_parents[new_task.slug] = list(task.parents.values_list('slug', flat=True))
+            new_tasks[task.slug] = new_task
+
+        Task.objects.bulk_create(new_tasks.values())
+
+        # Add parents once all the tasks are created
+        for task in new_tasks.values():
+            if task_parents[task.slug]:
+                parents = [new_tasks[slug] for slug in task_parents[task.slug]]
+                task.parents.set(parents)
+
+        # Set last_run so that subsequent calls to get_last_run() do not require DB queries
+        self.last_run = last_run + 1
+        self.started = timezone.now()
+        self.finished = None
+
+        # If worker activities are enabled for this process, no matter their previous state,
+        # re-initialize the activities just in case any were deleted or stolen by other processes
+        # before we retried this one.
+        if self.activity_state != ActivityState.Disabled:
+            from arkindex.project.triggers import initialize_activity
+            initialize_activity(self)
+            self.activity_state = ActivityState.Pending
+
+        self.save()
+
+    def stop(self) -> None:
+        """
+        Fully stop the process by updating every pending or running task to the Stopping state,
+        and every unscheduled task to the Stopped state.
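+
+        Illustrative sketch: a process with one Pending, one Running and one Unscheduled
+        task ends up with two Stopping tasks and one Stopped task; the Stopping tasks
+        are expected to be reported as Stopped later through task state updates.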
+        """
+        assert not self.is_final, 'Cannot stop a process that is in a final state'
+
+        stopping_count = self.tasks.filter(
+            state__in=[State.Pending, State.Running]
+        ).update(state=State.Stopping)
+        self.tasks.filter(state=State.Unscheduled).update(state=State.Stopped)
+
+        # If all the tasks are immediately stopped, then UpdateTask will not be able to update
+        # the finished attribute, so we do it here.
+        if not stopping_count:
+            self.finished = timezone.now()
+            self.save()
 
     def clear(self):
         self.worker_runs.all().delete()
@@ -945,7 +1108,7 @@ class WorkerRun(models.Model):
     class Meta:
         unique_together = (('version', 'process'),)
 
-    def build_task(self, workflow, task_name, env, import_task_name, elements_path, run=0, chunk=None, workflow_runs=None, use_gpu=False):
+    def build_task(self, process, task_name, env, import_task_name, elements_path, run=0, chunk=None, workflow_runs=None):
         '''
         Build the Task that will represent this WorkerRun in ponos using :
         - the docker image name given by the WorkerVersion
@@ -1007,9 +1170,9 @@ class WorkerRun(models.Model):
             depth=0,
             run=run,
             token=token,
-            workflow=workflow,
+            process=process,
             extra_files=extra_files,
-            requires_gpu=use_gpu and self.version.gpu_usage in (WorkerVersionGPUUsage.Required, WorkerVersionGPUUsage.Supported)
+            requires_gpu=process.use_gpu and self.version.gpu_usage in (WorkerVersionGPUUsage.Required, WorkerVersionGPUUsage.Supported)
         )
 
         return task, parents
diff --git a/arkindex/process/serializers/imports.py b/arkindex/process/serializers/imports.py
index 85c88ec259..89c144e3b0 100644
--- a/arkindex/process/serializers/imports.py
+++ b/arkindex/process/serializers/imports.py
@@ -1,3 +1,5 @@
+from collections import defaultdict
+
 from django.conf import settings
 from django.db.models import Q
 from rest_framework import serializers
@@ -5,10 +7,21 @@ from rest_framework.exceptions import PermissionDenied, ValidationError
 
 from arkindex.documents.models import Corpus, Element, ElementType
 from arkindex.ponos.models import Farm, State
-from arkindex.process.models import ActivityState, DataFile, Process, ProcessMode, WorkerRun, WorkerVersionState
+from arkindex.ponos.serializers import FarmSerializer, TaskLightSerializer
+from arkindex.process.models import (
+    ActivityState,
+    DataFile,
+    Process,
+    ProcessMode,
+    WorkerRun,
+    WorkerVersionGPUUsage,
+    WorkerVersionState,
+)
 from arkindex.process.serializers.git import RevisionSerializer
 from arkindex.project.mixins import ProcessACLMixin
 from arkindex.project.serializer_fields import EnumField, LinearRingField
+from arkindex.project.validators import MaxValueValidator
+from arkindex.training.models import ModelVersionState
 from arkindex.users.models import Role
 from arkindex.users.utils import get_max_level
 from transkribus import TranskribusAPI
@@ -19,10 +32,9 @@ class ProcessLightSerializer(serializers.ModelSerializer):
     Serialize a process
     """
 
-    state = EnumField(State, read_only=True)
+    state = EnumField(State, required=False)
     mode = EnumField(ProcessMode, read_only=True)
     creator = serializers.HiddenField(default=serializers.CurrentUserDefault())
-    workflow = serializers.HyperlinkedRelatedField(read_only=True, view_name='api:workflow-details')
     activity_state = EnumField(ActivityState, read_only=True)
 
     class Meta:
@@ -34,10 +46,9 @@ class ProcessLightSerializer(serializers.ModelSerializer):
             'mode',
             'corpus',
             'creator',
-            'workflow',
             'activity_state',
         )
-        read_only_fields = ('id', 'state', 'mode', 'corpus', 'creator', 'workflow', 'activity_state')
+        read_only_fields = ('id', 'state', 'mode', 'corpus', 'creator', 'activity_state')
 
 
 class ProcessTrainingSerializer(ProcessLightSerializer):
@@ -77,7 +88,12 @@ class ProcessSerializer(ProcessTrainingSerializer):
         style={'base_template': 'input.html'},
     )
     folder_type = serializers.SlugField(source='folder_type.slug', default=None, read_only=True)
-    element_type = serializers.SlugRelatedField(queryset=ElementType.objects.none(), slug_field='slug', allow_null=True)
+    element_type = serializers.SlugRelatedField(
+        queryset=ElementType.objects.none(),
+        slug_field='slug',
+        allow_null=True,
+        style={'base_template': 'input.html'},
+    )
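+    # The plain text input keeps DRF's browsable API from building a select widget
+    # out of the whole queryset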
     element_name_contains = serializers.CharField(
         source='name_contains',
         allow_null=True,
@@ -116,44 +132,89 @@ class ProcessSerializer(ProcessTrainingSerializer):
             'template_id',
         )
 
+
+class ProcessDetailsSerializer(ProcessSerializer):
+    farm = FarmSerializer(read_only=True)
+    tasks = TaskLightSerializer(many=True, read_only=True)
+
+    class Meta(ProcessSerializer.Meta):
+        fields = ProcessSerializer.Meta.fields + (
+            'farm',
+            'tasks',
+        )
+        read_only_fields = (
+            # Make the state writable again to allow stopping processes
+            tuple(set(ProcessSerializer.Meta.read_only_fields) - {'state'}) + (
+                'farm',
+                'tasks',
+            )
+        )
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        process = self.context.get('process')
-        if not process or not process.corpus:
+        if not self.instance or not self.instance.corpus:
             return
-        self.fields['element_type'].queryset = ElementType.objects.filter(corpus=process.corpus)
-        self.fields['element_id'].queryset = process.corpus.elements.all()
+
+        self.fields['element_type'].queryset = self.instance.corpus.types.all()
+        self.fields['element_id'].queryset = (
+            self.instance.corpus.elements.all()
+            # Include the extra attributes that will be serialized in the response
+            .select_related('type', 'image__server')
+        )
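+        # Restricting the querysets to the process' corpus is also what makes the
+        # serializer reject elements and types from other corpora during validation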
 
     def validate(self, data):
         data = super().validate(data)
-        # Editing a process name only is always allowed
-        if set(data.keys()) == {'name'}:
-            return data
+        edited_fields = set(data.keys())
 
-        if not self.instance:
-            return
+        # Fields that can always be edited on any process, whatever its state
+        editable_fields = {'name', 'state'}
 
-        # Allow editing the element ID on processes at any time
-        if self.instance.mode in (ProcessMode.Files, ProcessMode.IIIF) and set(data.keys()) == {'element'}:
-            return data
+        # Allow editing the element ID and name on Files and IIIF processes at any time
+        # TODO: Only allow Ponos tasks to edit the element ID on a running file import,
+        # since this edit is only needed to show the "View element" button once the import completes.
+        if self.instance.mode in (ProcessMode.Files, ProcessMode.IIIF):
+            editable_fields.add('element')
 
-        if self.instance.state == State.Running:
-            raise serializers.ValidationError({'__all__': ['Cannot edit a workflow while it is running']})
+        # If any of the edited fields are not always editable
+        if edited_fields - editable_fields:
+            # Fail if the process is running or about to run
+            if self.instance.state in (State.Pending, State.Running):
+                raise serializers.ValidationError({'__all__': ['Cannot edit a process while it is running']})
 
-        if self.instance.mode != ProcessMode.Workers:
-            raise serializers.ValidationError({'__all__': [f'Only processes of mode {ProcessMode.Workers} can be updated']})
+            # Otherwise, only allow editing Workers processes.
+            if self.instance.mode != ProcessMode.Workers:
+                raise serializers.ValidationError({'__all__': ['Only processes of mode Workers can be updated']})
 
         return data
 
+    def validate_state(self, state):
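+        # Stopping is the only state a client may request; the transition itself
+        # is applied in update() below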
+        if state != State.Stopping:
+            raise serializers.ValidationError("Can only change the state to 'stopping'")
+
+        if self.instance.state not in (State.Unscheduled, State.Pending, State.Running):
+            raise serializers.ValidationError(f'Cannot stop a {self.instance.state.value} process')
+
+        return state
+
+    def update(self, instance, validated_data):
+        if validated_data.get('state') == State.Stopping:
+            self.instance.stop()
+            # `state` is a read-only property on Process, so remove it from validated_data to prevent DRF from trying to set it
+            del validated_data['state']
+
+        return super().update(instance, validated_data)
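+    # Usage sketch (endpoint shape assumed): a PATCH on the process details API with
+    # {"state": "stopping"} goes through validate_state() above, then Process.stop() here.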
+
 
 class ProcessListSerializer(ProcessLightSerializer):
-    created = serializers.DateTimeField(read_only=True)
-    updated = serializers.DateTimeField(source='last_date', read_only=True)
-    finished = serializers.DateTimeField(source='workflow.finished', read_only=True, default=None)
 
     class Meta(ProcessLightSerializer.Meta):
-        fields = ProcessLightSerializer.Meta.fields + ('created', 'updated', 'finished')
-        read_only_fields = ProcessLightSerializer.Meta.read_only_fields + ('created', 'updated', 'finished')
+        fields = ProcessLightSerializer.Meta.fields + ('created', 'updated', 'started', 'finished')
+        read_only_fields = ProcessLightSerializer.Meta.read_only_fields + ('created', 'updated', 'started', 'finished')
+        # Show in the API docs that Process.started and Process.finished are nullable; this is not detected automatically
+        extra_kwargs = {
+            'started': {'allow_null': True},
+            'finished': {'allow_null': True},
+        }
 
 
 class FilesProcessSerializer(serializers.Serializer):
@@ -247,13 +308,66 @@ class FilesProcessSerializer(serializers.Serializer):
 
 
 class StartProcessSerializer(serializers.Serializer):
-    chunks = serializers.IntegerField(min_value=1, max_value=settings.MAX_CHUNKS, default=1)
+    chunks = serializers.IntegerField(
+        min_value=1,
+        validators=[MaxValueValidator(lambda: settings.MAX_CHUNKS)],
+        default=1,
+    )
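+    # The max value is a callable so that settings.MAX_CHUNKS is read at validation
+    # time (e.g. under override_settings in tests) rather than once at import time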
     thumbnails = serializers.BooleanField(default=False)
     farm = serializers.PrimaryKeyRelatedField(queryset=Farm.objects.all(), required=False, allow_null=True)
     use_cache = serializers.BooleanField(default=False)
     use_gpu = serializers.BooleanField(default=False, allow_null=True)
     worker_activity = serializers.BooleanField(default=False)
 
+    def validate(self, validated_data):
+        assert self.instance is not None, 'A Process instance is required for this serializer'
+        errors = defaultdict(list)
+
+        # Use process.worker_runs.all() to access the prefetched worker runs and avoid new SQL queries;
+        # the related worker versions have also been prefetched
+        if self.instance.worker_runs.all():
+            if validated_data.get('use_gpu') and not any(
+                worker_run.version.gpu_usage != WorkerVersionGPUUsage.Disabled
+                for worker_run in self.instance.worker_runs.all()
+            ):
+                errors['use_gpu'].append('The process is configured to use a GPU, but does not include any workers that support GPU usage.')
+
+            # Check if a worker run has no model version but version.model_usage = True
+            missing_model_versions = []
+            unavailable_versions = []
+            has_unavailable_model_versions = False
+
+            for worker_run in self.instance.worker_runs.all():
+                if worker_run.version.model_usage and worker_run.model_version_id is None:
+                    missing_model_versions.append(worker_run.version.worker.name)
+                if worker_run.version.state != WorkerVersionState.Available or worker_run.version.docker_image_id is None:
+                    unavailable_versions.append(worker_run.version.id)
+                if worker_run.model_version_id and worker_run.model_version.state != ModelVersionState.Available:
+                    has_unavailable_model_versions = True
+
+            if missing_model_versions:
+                errors['model_version'].append(f"The following workers require a model version and none was set: {missing_model_versions}")
+            if has_unavailable_model_versions:
+                errors['model_version'].append('This process contains one or more unavailable model versions and cannot be started.')
+            if unavailable_versions:
+                errors['version'].append('This process contains one or more unavailable worker versions and cannot be started.')
+
+        else:
+            if validated_data.get('worker_activity'):
+                errors['worker_activity'].append('The process must have workers attached to handle their activity.')
+            if validated_data.get('use_cache'):
+                errors['use_cache'].append('The process must have workers attached to use cached results.')
+            if validated_data.get('use_gpu'):
+                errors['use_gpu'].append('The process must have workers attached to use GPUs.')
+            if not validated_data.get('thumbnails'):
+                errors['__all__'].append('The process must either use thumbnail generation or have worker runs.')
+
+        if errors:
+            raise ValidationError(errors)
+
+        return validated_data
+
+    def update(self, process, validated_data):
+        process.start(**validated_data)
+        return process
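+    # Usage sketch: the StartProcess view is expected to bind this serializer to the
+    # process so that save() triggers the start:
+    #   serializer = StartProcessSerializer(process, data=request.data)
+    #   serializer.is_valid(raise_exception=True)
+    #   serializer.save()  # calls update() -> process.start(**validated_data)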
+
 
 class CreateProcessTemplateSerializer(serializers.ModelSerializer):
     name = serializers.CharField(required=False, max_length=100, default="")
diff --git a/arkindex/process/serializers/workers.py b/arkindex/process/serializers/workers.py
index bc6911790a..0b4be829c0 100644
--- a/arkindex/process/serializers/workers.py
+++ b/arkindex/process/serializers/workers.py
@@ -315,7 +315,7 @@ class WorkerActivitySerializer(serializers.ModelSerializer):
 
     def validate_process_id(self, process):
         request = self.context.get('request')
-        if request and isinstance(request.auth, Task) and process.workflow_id != request.auth.workflow_id:
+        if request and isinstance(request.auth, Task) and process.id != request.auth.process_id:
             raise serializers.ValidationError('Only WorkerActivities for the process of the currently authenticated task can be updated.')
         return process
 
diff --git a/arkindex/process/signals.py b/arkindex/process/signals.py
index ad23523dcb..6d5b51cce9 100644
--- a/arkindex/process/signals.py
+++ b/arkindex/process/signals.py
@@ -75,7 +75,7 @@ def stop_started_activities(sender, task, **kwargs):
     to also be marked as error when they are successful, but we do not have any reliable link
     between Ponos tasks and Arkindex worker versions.
     """
-    process = Process.objects.filter(workflow_id=task.workflow_id).only('id', 'activity_state').first()
+    process = Process.objects.filter(id=task.process_id).only('id', 'activity_state').first()
     if not process or process.activity_state == ActivityState.Disabled:
         return
     count = process.activities.filter(state=WorkerActivityState.Started).update(state=WorkerActivityState.Error)
diff --git a/arkindex/process/tests/test_create_s3_import.py b/arkindex/process/tests/test_create_s3_import.py
index e5fb55e12a..2614be53f3 100644
--- a/arkindex/process/tests/test_create_s3_import.py
+++ b/arkindex/process/tests/test_create_s3_import.py
@@ -127,7 +127,7 @@ class TestCreateS3Import(FixtureTestCase):
         ImageServer.objects.create(id=999, display_name='Ingest image server', url='https://dev.null.teklia.com')
         element = self.corpus.elements.get(name='Volume 1')
 
-        with self.assertNumQueries(25), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
+        with self.assertNumQueries(24), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             response = self.client.post(reverse('api:s3-import-create'), {
                 'corpus_id': str(self.corpus.id),
                 'element_id': str(element.id),
@@ -156,8 +156,7 @@ class TestCreateS3Import(FixtureTestCase):
         self.assertIsNone(worker_run.configuration_id)
         self.assertIsNone(worker_run.model_version_id)
 
-        self.assertIsNotNone(process.workflow_id)
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         self.assertEqual(task.slug, 'import')
         self.assertEqual(task.image, 'arkindex-tasks-image')
         self.assertEqual(task.command, f'python -m arkindex_tasks.import_s3 --corpus={self.corpus.id} '
@@ -190,7 +189,7 @@ class TestCreateS3Import(FixtureTestCase):
         self.corpus.types.create(slug='folder', display_name='Folder', folder=True)
         ImageServer.objects.create(id=999, display_name='Ingest image server', url='https://dev.null.teklia.com')
 
-        with self.assertNumQueries(23), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
+        with self.assertNumQueries(22), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             response = self.client.post(reverse('api:s3-import-create'), {
                 'corpus_id': str(self.corpus.id),
                 'bucket_name': 'blah',
@@ -208,7 +207,6 @@ class TestCreateS3Import(FixtureTestCase):
         self.assertEqual(process.element_type, self.corpus.types.get(slug='page'))
         self.assertEqual(process.bucket_name, 'blah')
         self.assertIsNone(process.prefix)
-        self.assertIsNotNone(process.workflow_id)
 
         worker_run = process.worker_runs.get()
         self.assertEqual(worker_run.version, self.import_worker_version)
@@ -216,7 +214,7 @@ class TestCreateS3Import(FixtureTestCase):
         self.assertIsNone(worker_run.configuration_id)
         self.assertIsNone(worker_run.model_version_id)
 
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         self.assertEqual(task.slug, 'import')
         self.assertEqual(task.image, 'arkindex-tasks-image')
         self.assertEqual(task.command, f'python -m arkindex_tasks.import_s3 --corpus={self.corpus.id} '
diff --git a/arkindex/process/tests/test_create_training_process.py b/arkindex/process/tests/test_create_training_process.py
index c4e544cc6e..5228bd78b1 100644
--- a/arkindex/process/tests/test_create_training_process.py
+++ b/arkindex/process/tests/test_create_training_process.py
@@ -1,6 +1,5 @@
 import uuid
 from unittest.mock import patch
-from urllib.parse import urljoin
 
 from django.test import override_settings
 from django.urls import reverse
@@ -272,7 +271,7 @@ class TestCreateTrainingProcess(FixtureTestCase):
         training_process = Process.objects.filter(name='Test training')
         self.assertEqual(training_process.count(), 0)
 
-        with self.assertNumQueries(25):
+        with self.assertNumQueries(24):
             response = self.client.post(reverse('api:process-training'), self.base_payload)
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
@@ -301,14 +300,11 @@ class TestCreateTrainingProcess(FixtureTestCase):
             'use_cache': False,
             'use_gpu': False,
             'validation_folder_id': None,
-            'workflow': urljoin('http://testserver/api/v1/', training_process.workflow.get_absolute_url()),
         })
 
-        # Check workflow properties
-        workflow = training_process.workflow
         worker_run = training_process.worker_runs.get()
 
-        training_task = workflow.tasks.get()
+        training_task = training_process.tasks.get()
         self.assertEqual(training_task.slug, 'training')
         self.assertDictEqual(training_task.env, {
             'ARKINDEX_CORPUS_ID': str(self.corpus.id),
@@ -333,7 +329,7 @@ class TestCreateTrainingProcess(FixtureTestCase):
         self.client.force_login(self.user)
         training_process = Process.objects.filter(name='Test training')
         self.assertEqual(training_process.count(), 0)
-        with self.assertNumQueries(24):
+        with self.assertNumQueries(23):
             response = self.client.post(reverse('api:process-training'), self.base_payload)
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(training_process.count(), 1)
@@ -346,7 +342,7 @@ class TestCreateTrainingProcess(FixtureTestCase):
         self.client.force_login(self.user)
         training_process = Process.objects.filter(name='Test training')
         self.assertEqual(training_process.count(), 0)
-        with self.assertNumQueries(24):
+        with self.assertNumQueries(23):
             response = self.client.post(reverse('api:process-training'), self.base_payload)
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(training_process.count(), 1)
@@ -358,7 +354,7 @@ class TestCreateTrainingProcess(FixtureTestCase):
         self.client.force_login(self.user)
         training_process = Process.objects.filter(name='Test training')
         self.assertEqual(training_process.count(), 0)
-        with self.assertNumQueries(30):
+        with self.assertNumQueries(29):
             response = self.client.post(reverse('api:process-training'), {
                 **self.base_payload,
                 'validation_folder_id': str(self.validation_folder.id),
@@ -391,13 +387,9 @@ class TestCreateTrainingProcess(FixtureTestCase):
             'use_cache': False,
             'use_gpu': True,
             'validation_folder_id': str(self.validation_folder.id),
-            'workflow': urljoin('http://testserver/api/v1/', training_process.workflow.get_absolute_url()),
         })
         self.assertEqual(training_process.creator_id, self.user.id)
-        # Check workflow properties
-        workflow = training_process.workflow
-        self.assertEqual(workflow.tasks.count(), 1)
-        task = workflow.tasks.get()
+        task = training_process.tasks.get()
         self.assertEqual(sorted(task.env.keys()), [
             'ARKINDEX_CORPUS_ID',
             'ARKINDEX_PROCESS_ID',
@@ -423,14 +415,14 @@ class TestCreateTrainingProcess(FixtureTestCase):
         self.assertEqual(self.training_worker_version.docker_shm_size, '999G')
         self.training_worker_version.save()
 
-        with self.assertNumQueries(25):
+        with self.assertNumQueries(24):
             response = self.client.post(reverse('api:process-training'), self.base_payload)
             self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
         training_process = Process.objects.get(mode=ProcessMode.Training, name='Test training')
         worker_run = training_process.worker_runs.get()
 
-        task = training_process.workflow.tasks.get()
+        task = training_process.tasks.get()
         self.assertEqual(task.env, {
             'ARKINDEX_CORPUS_ID': str(self.corpus.id),
             'ARKINDEX_PROCESS_ID': str(training_process.id),
diff --git a/arkindex/process/tests/test_datafile_api.py b/arkindex/process/tests/test_datafile_api.py
index b75034083e..db2d56b001 100644
--- a/arkindex/process/tests/test_datafile_api.py
+++ b/arkindex/process/tests/test_datafile_api.py
@@ -166,7 +166,7 @@ class TestDataFileApi(FixtureAPITestCase):
         process.files.add(self.df)
         with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
 
         response = self.client.get(
             reverse('api:file-retrieve', kwargs={'pk': self.df.id}),
diff --git a/arkindex/process/tests/test_workflows_api.py b/arkindex/process/tests/test_element_workflow.py
similarity index 92%
rename from arkindex/process/tests/test_workflows_api.py
rename to arkindex/process/tests/test_element_workflow.py
index 1567f02d6e..e51fc10974 100644
--- a/arkindex/process/tests/test_workflows_api.py
+++ b/arkindex/process/tests/test_element_workflow.py
@@ -13,9 +13,9 @@ from arkindex.project.tests import FixtureAPITestCase
 from arkindex.users.models import Role
 
 
-class TestWorkflows(FixtureAPITestCase):
+class TestCreateElementsWorkflow(FixtureAPITestCase):
     """
-    Test workflows from Arkindex existing elements
+    Test creating Workers processes with CreateElementsWorkflow
     """
 
     @classmethod
@@ -66,7 +66,7 @@ class TestWorkflows(FixtureAPITestCase):
 
         data = response.json()
         process = Process.objects.get(id=data['id'])
-        self.assertIsNone(process.workflow)
+        self.assertFalse(process.tasks.exists())
 
         self.assertDictEqual(data, {
             'name': None,
@@ -76,7 +76,6 @@ class TestWorkflows(FixtureAPITestCase):
             'files': [],
             'mode': 'workers',
             'revision': process.revision,
-            'workflow': None,
             'folder_type': None,
             'element_type': 'page',
             'element': None,
@@ -98,11 +97,10 @@ class TestWorkflows(FixtureAPITestCase):
             # The process needs a worker run or thumbnails generation to start
             {'thumbnails': 'true'}
         )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process.refresh_from_db()
-        self.assertIsNotNone(process.workflow)
 
-        init_task = process.workflow.tasks.get(slug='initialisation')
+        init_task = process.tasks.get(slug='initialisation')
         self.assertEqual(init_task.image, 'tasks:latest')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process.id} --chunks-number 1')
 
@@ -122,7 +120,7 @@ class TestWorkflows(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         data = response.json()
         process = Process.objects.get(id=data['id'])
-        self.assertIsNone(process.workflow)
+        self.assertFalse(process.tasks.exists())
 
         self.assertEqual(process.element, self.volume)
         self.assertEqual(data.pop('element')['id'], str(self.volume.id))
@@ -134,7 +132,6 @@ class TestWorkflows(FixtureAPITestCase):
             'corpus': str(self.corpus.id),
             'files': [],
             'revision': process.revision,
-            'workflow': None,
             'folder_type': None,
             'element_type': None,
             'template_id': None,
@@ -339,7 +336,7 @@ class TestWorkflows(FixtureAPITestCase):
         """
         self.client.force_login(self.user)
         element = self.corpus.elements.create(type=self.pages.first().type, name='Kill me please')
-        with self.assertNumQueries(12):
+        with self.assertNumQueries(13):
             response = self.client.post(
                 reverse('api:corpus-workflow'),
                 {
@@ -358,7 +355,6 @@ class TestWorkflows(FixtureAPITestCase):
             'files': [],
             'mode': 'workers',
             'revision': process.revision,
-            'workflow': None,
             'folder_type': None,
             'element_type': None,
             'element': {
@@ -430,7 +426,7 @@ class TestWorkflows(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         data = response.json()
         process = Process.objects.get(id=data['id'])
-        self.assertIsNone(process.workflow)
+        self.assertFalse(process.tasks.exists())
         self.assertEqual(process.elements.get(), page)
 
         # The process needs to be started to produce a workflow
@@ -439,11 +435,10 @@ class TestWorkflows(FixtureAPITestCase):
             # The process needs a worker run or thumbnails generation to start
             {'thumbnails': 'true'}
         )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process.refresh_from_db()
-        self.assertIsNotNone(process.workflow)
 
-        init_task = process.workflow.tasks.get(slug='initialisation')
+        init_task = process.tasks.get(slug='initialisation')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process.id} --chunks-number 1')
 
         response = self.client.get(reverse('api:process-elements-list', kwargs={'pk': process.id}))
@@ -499,7 +494,7 @@ class TestWorkflows(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         data = response.json()
         process = Process.objects.get(id=data['id'])
-        self.assertIsNone(process.workflow)
+        self.assertFalse(process.tasks.exists())
 
         response = self.client.post(
             reverse('api:process-start', kwargs={'pk': str(process.id)}),
@@ -508,17 +503,16 @@ class TestWorkflows(FixtureAPITestCase):
                 'thumbnails': True,
             }
         )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process.refresh_from_db()
-        self.assertIsNotNone(process.workflow)
 
-        self.assertEqual(process.workflow.tasks.count(), 4)
-        init_task = process.workflow.tasks.get(slug='initialisation')
+        self.assertEqual(process.tasks.count(), 4)
+        init_task = process.tasks.get(slug='initialisation')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process.id} --chunks-number 3')
 
         for i in range(1, 4):
             self.assertEqual(
-                process.workflow.tasks.get(slug=f'thumbnails_{i}').command,
+                process.tasks.get(slug=f'thumbnails_{i}').command,
                 f'python3 -m arkindex_tasks.generate_thumbnails /data/initialisation/elements_chunk_{i}.json'
             )
 
@@ -545,28 +539,27 @@ class TestWorkflows(FixtureAPITestCase):
             parents=[run_1.id],
         )
 
-        self.assertIsNone(process_2.workflow)
+        self.assertFalse(process_2.tasks.exists())
         self.assertFalse(self.corpus.worker_versions.exists())
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process_2.id)})
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(response.json()['id'], str(process_2.id))
 
         process_2.refresh_from_db()
         self.assertEqual(process_2.state, State.Unscheduled)
-        self.assertIsNotNone(process_2.workflow)
 
-        self.assertEqual(process_2.workflow.tasks.count(), 3)
+        self.assertEqual(process_2.tasks.count(), 3)
 
-        init_task = process_2.workflow.tasks.get(slug='initialisation')
+        init_task = process_2.tasks.get(slug='initialisation')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process_2.id} --chunks-number 1')
         self.assertEqual(init_task.image, 'registry.teklia.com/tasks')
 
-        reco_task = process_2.workflow.tasks.get(slug=f'reco_{str(self.version_1.id)[0:6]}')
+        reco_task = process_2.tasks.get(slug=f'reco_{str(self.version_1.id)[0:6]}')
         self.assertEqual(reco_task.command, None)
         self.assertEqual(reco_task.image, f'my_repo.fake/workers/worker/reco:{self.version_1.id}')
         self.assertEqual(reco_task.shm_size, None)
@@ -580,7 +573,7 @@ class TestWorkflows(FixtureAPITestCase):
             'ARKINDEX_TASK_TOKEN': '67891'
         })
 
-        dla_task = process_2.workflow.tasks.get(slug=f'dla_{str(self.version_2.id)[0:6]}')
+        dla_task = process_2.tasks.get(slug=f'dla_{str(self.version_2.id)[0:6]}')
         self.assertEqual(dla_task.command, None)
         self.assertEqual(dla_task.image, f'my_repo.fake/workers/worker/dla:{self.version_2.id}')
         self.assertEqual(dla_task.shm_size, None)
@@ -637,21 +630,20 @@ class TestWorkflows(FixtureAPITestCase):
         process_2.use_cache = True
         process_2.save()
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process_2.id)})
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(response.json()['use_cache'], True)
         process_2.refresh_from_db()
         self.assertEqual(process_2.state, State.Unscheduled)
-        self.assertIsNotNone(process_2.workflow)
 
-        init_task = process_2.workflow.tasks.get(slug='initialisation')
+        init_task = process_2.tasks.get(slug='initialisation')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process_2.id} --chunks-number 1 --use-cache')
         self.assertEqual(init_task.image, 'registry.teklia.com/tasks')
 
-        worker_task = process_2.workflow.tasks.get(slug=f'reco_{str(self.version_1.id)[0:6]}')
+        worker_task = process_2.tasks.get(slug=f'reco_{str(self.version_1.id)[0:6]}')
         self.assertEqual(worker_task.command, None)
         self.assertEqual(worker_task.image, f'my_repo.fake/workers/worker/reco:{self.version_1.id}')
         self.assertEqual(worker_task.image_artifact.id, self.version_1.docker_image.id)
@@ -685,20 +677,19 @@ class TestWorkflows(FixtureAPITestCase):
         process_2.use_gpu = True
         process_2.save()
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             response = self.client.post(reverse('api:process-start', kwargs={'pk': str(process_2.id)}))
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
         self.assertEqual(response.json()['use_gpu'], True)
         process_2.refresh_from_db()
         self.assertEqual(process_2.state, State.Unscheduled)
-        self.assertIsNotNone(process_2.workflow)
 
-        init_task = process_2.workflow.tasks.get(slug='initialisation')
+        init_task = process_2.tasks.get(slug='initialisation')
         self.assertEqual(init_task.command, f'python -m arkindex_tasks.init_elements {process_2.id} --chunks-number 1')
         self.assertEqual(init_task.image, 'registry.teklia.com/tasks')
 
-        worker_task = process_2.workflow.tasks.get(slug=f'worker-gpu_{str(self.version_3.id)[0:6]}')
+        worker_task = process_2.tasks.get(slug=f'worker-gpu_{str(self.version_3.id)[0:6]}')
         self.assertEqual(worker_task.command, None)
         self.assertEqual(worker_task.image, f'my_repo.fake/workers/worker/worker-gpu:{self.version_3.id}')
         self.assertEqual(worker_task.image_artifact.id, self.version_3.docker_image.id)
@@ -728,20 +719,20 @@ class TestWorkflows(FixtureAPITestCase):
         process.save()
         self.client.force_login(self.user)
         response = self.client.post(reverse('api:process-start', kwargs={'pk': str(process.id)}))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process.refresh_from_db()
         self.assertEqual(process.state, State.Unscheduled)
 
-        init_task, worker_task = process.workflow.tasks.filter(run=0)
+        init_task, worker_task = process.tasks.filter(run=0)
         self.assertEqual(init_task.requires_gpu, False)
         self.assertEqual(worker_task.requires_gpu, True)
 
-        process.workflow.tasks.update(state=State.Stopped)
+        process.tasks.update(state=State.Stopped)
 
         response = self.client.post(reverse('api:process-retry', kwargs={'pk': str(process.id)}))
         self.assertEqual(response.status_code, status.HTTP_200_OK)
 
-        init_task, worker_task = process.workflow.tasks.filter(run=1)
+        init_task, worker_task = process.tasks.filter(run=1)
         self.assertEqual(init_task.requires_gpu, False)
         self.assertEqual(worker_task.requires_gpu, True)
 
@@ -762,7 +753,7 @@ class TestWorkflows(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(
             response.json(),
-            {'use_gpu': 'The process is configured to use GPU, but does not include any workers that support GPU usage.'}
+            {'use_gpu': ['The process is configured to use a GPU, but does not include any workers that support GPU usage.']}
         )
 
     def test_use_gpu_some_unsupported(self):
@@ -778,15 +769,15 @@ class TestWorkflows(FixtureAPITestCase):
         process.use_gpu = True
         process.save()
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process.id)}),
                 {'use_gpu': 'true'}
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
 
         process.refresh_from_db()
-        init_task, worker_2_task, worker_3_task = process.workflow.tasks.filter(run=0)
+        init_task, worker_2_task, worker_3_task = process.tasks.filter(run=0)
         self.assertEqual(init_task.requires_gpu, False)
         self.assertEqual(worker_2_task.requires_gpu, False)
         self.assertEqual(worker_3_task.requires_gpu, True)
diff --git a/arkindex/process/tests/test_gitlab_provider.py b/arkindex/process/tests/test_gitlab_provider.py
index 7625fd9492..35f85bc0de 100644
--- a/arkindex/process/tests/test_gitlab_provider.py
+++ b/arkindex/process/tests/test_gitlab_provider.py
@@ -702,7 +702,6 @@ class TestGitLabProvider(FixtureTestCase):
         # Should only include data from fixtures at first
         self.assertFalse(self.repo.revisions.exclude(id=self.rev.id).exists())
         self.assertFalse(self.repo.refs.exists())
-        self.assertFalse(Process.objects.filter(mode=ProcessMode.Repository).exists())
 
         # Update references for this repo
         provider = GitLabProvider(credentials=self.creds)
@@ -738,7 +737,7 @@ class TestGitLabProvider(FixtureTestCase):
         self.assertListEqual(list(
             Process
             .objects
-            .filter(mode=ProcessMode.Repository)
+            .filter(mode=ProcessMode.Repository, revision__isnull=False)
             .values_list('name', 'mode', 'revision__hash')
             .order_by('revision__hash')
         ), [
diff --git a/arkindex/process/tests/test_process_elements.py b/arkindex/process/tests/test_process_elements.py
index 60c4fd314a..6fbf741903 100644
--- a/arkindex/process/tests/test_process_elements.py
+++ b/arkindex/process/tests/test_process_elements.py
@@ -527,7 +527,7 @@ class TestProcessElements(FixtureAPITestCase):
         """
         ListProcessElements does not support repositories without a corpus
         """
-        process = self.superuser.processes.get(corpus=None)
+        process = self.superuser.processes.get(corpus=None, mode=ProcessMode.Local)
         self.client.force_login(self.superuser)
 
         for mode in (ProcessMode.Local, ProcessMode.Repository):
diff --git a/arkindex/process/tests/test_processes.py b/arkindex/process/tests/test_processes.py
index 2785d272e1..d2fab7f895 100644
--- a/arkindex/process/tests/test_processes.py
+++ b/arkindex/process/tests/test_processes.py
@@ -1,15 +1,13 @@
-
-import importlib
 import uuid
 from unittest.mock import MagicMock, call, patch
 
 from django.test import override_settings
 from django.urls import reverse
+from django.utils import timezone
 from rest_framework import status
-from rest_framework.exceptions import ValidationError
 
 from arkindex.documents.models import Corpus, ElementType
-from arkindex.ponos.models import Farm, State, Task, Workflow
+from arkindex.ponos.models import Farm, State
 from arkindex.process.models import (
     ActivityState,
     Process,
@@ -33,6 +31,7 @@ class TestProcesses(FixtureAPITestCase):
     @classmethod
     def setUpTestData(cls):
         super().setUpTestData()
+        cls.farm = Farm.objects.get(name='Default farm')
         cls.creds = cls.user.credentials.get()
         cls.repo = cls.creds.repos.get(url='http://my_repo.fake/workers/worker')
         cls.rev = cls.repo.revisions.get()
@@ -67,12 +66,13 @@ class TestProcesses(FixtureAPITestCase):
         cls.user2 = User.objects.create_user('user2@test.test', display_name='Process creator')
 
         # Guest access (A user owns a process on a corpus they are not a member anymore)
+        cls.private_corpus = Corpus.objects.create(name='Private')
         cls.user_img_process = Process.objects.create(
             mode=ProcessMode.Files,
             creator=cls.user,
-            corpus=Corpus.objects.create(name='Private'),
+            corpus=cls.private_corpus,
+            farm=cls.farm,
         )
-        cls.private_corpus = Corpus.objects.get(name='Private')
         # Contributor access
         cls.repo.memberships.create(user=cls.user, level=Role.Contributor.value)
         cls.repository_process = Process.objects.create(
@@ -95,7 +95,6 @@ class TestProcesses(FixtureAPITestCase):
             creator=cls.user,
             corpus=cls.corpus,
             mode=ProcessMode.Training,
-            workflow=cls.elts_process.workflow,
             model=cls.training_model,
             train_folder=cls.train,
             validation_folder=cls.val,
@@ -113,48 +112,10 @@ class TestProcesses(FixtureAPITestCase):
 
         cls.import_worker_version = WorkerVersion.objects.get(worker__slug='file_import')
 
-    def _serialize_process(self, process):
-        updated = process.updated
-        if process.workflow:
-            updated = max(updated, process.workflow.tasks.order_by('-updated').first().updated)
-        return {
-            'name': process.name,
-            'id': str(process.id),
-            'state': process.state.value,
-            'mode': process.mode.value,
-            'corpus': process.corpus_id and str(process.corpus.id),
-            'workflow': process.workflow and f'http://testserver/api/v1/workflow/{process.workflow.id}/',
-            'activity_state': process.activity_state.value,
-            'created': process.created.isoformat().replace('+00:00', 'Z'),
-            'updated': updated.isoformat().replace('+00:00', 'Z'),
-            'finished': (
-                process.workflow.finished.isoformat().replace('+00:00', 'Z')
-                if process.workflow and process.workflow.finished
-                else None
-            ),
-        }
-
-    def build_task(self, workflow_id, run, state, depth=1):
-        """
-        Create a simple task with its workflow and process if required
-        """
-        workflow, created = Workflow.objects.get_or_create(
-            id=workflow_id,
-            farm=Farm.objects.get(name="Wheat farm")
-        )
-        if created:
-            # Creates the associated process
-            Process.objects.create(
-                corpus=self.corpus,
-                workflow=workflow,
-                mode=ProcessMode.Workers,
-                creator=self.user,
-            )
-        return Task.objects.create(run=run, depth=depth, workflow=workflow, state=state, slug=str(uuid.uuid4()))
-
     def test_list_requires_login(self):
-        response = self.client.get(reverse('api:process-list'))
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+        with self.assertNumQueries(0):
+            response = self.client.get(reverse('api:process-list'))
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
     def test_list(self):
         """
@@ -165,12 +126,14 @@ class TestProcesses(FixtureAPITestCase):
         self.user_img_process.save()
         self.repository_process.save()
         # An update on a process task will make it appear first
-        task = self.elts_process.workflow.tasks.first()
+        task = self.elts_process.tasks.first()
         task.save()
-
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        with self.assertNumQueries(8):
+            response = self.client.get(reverse('api:process-list'))
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         # Processes are ordered
         self.assertDictEqual(response.json(), {
             'count': 5,
@@ -178,54 +141,160 @@ class TestProcesses(FixtureAPITestCase):
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(p)
-                for p in [
-                    self.elts_process,
-                    self.repository_process,
-                    self.user_img_process,
-                    self.training_process,
-                    self.workers_process,
-                ]
+                {
+                    'id': str(self.repository_process.id),
+                    'name': None,
+                    'mode': 'repository',
+                    'corpus': None,
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.repository_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.repository_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.user_img_process.id),
+                    'name': None,
+                    'mode': 'files',
+                    'corpus': str(self.private_corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.user_img_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.user_img_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.elts_process.id),
+                    'name': None,
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.elts_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.elts_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': self.elts_process.started.isoformat().replace('+00:00', 'Z'),
+                    'finished': None,
+                },
+                {
+                    'id': str(self.training_process.id),
+                    'name': None,
+                    'mode': 'training',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.training_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.training_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.workers_process.id),
+                    'name': 'Process fixture',
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.workers_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.workers_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
     @override_settings(IMPORTS_WORKER_VERSION=None)
-    def test_list_with_workflow(self):
+    def test_list_with_tasks(self):
         """
-        Filter processes that have a workflow i.e. that have been started
+        Filter processes that have tasks, i.e. that have been started
         """
         with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             self.user_img_process.start()
+
         self.client.force_login(self.user)
         self.user_img_process.activity_state = ActivityState.Ready
         self.user_img_process.save()
-        with (self.assertNumQueries(9), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id))):
-            response = self.client.get(reverse('api:process-list'), {'with_workflow': 'true'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        data = response.json()
-        self.assertEqual(len(data['results']), 1)
-        results = data['results']
-        self.assertListEqual(results, [self._serialize_process(self.user_img_process)])
 
-    def test_list_exclude_workflow(self):
+        with self.assertNumQueries(8):
+            response = self.client.get(reverse('api:process-list'), {'with_tasks': 'true'})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.assertDictEqual(response.json(), {
+            'count': 1,
+            'number': 1,
+            'next': None,
+            'previous': None,
+            'results': [
+                {
+                    'id': str(self.user_img_process.id),
+                    'name': None,
+                    'mode': 'files',
+                    'corpus': str(self.private_corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'ready',
+                    'created': self.user_img_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.user_img_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': self.user_img_process.started.isoformat().replace('+00:00', 'Z'),
+                    'finished': None,
+                },
+            ],
+        })
+
+    def test_list_exclude_tasks(self):
         """
         Filter processes that have not been started yet
         """
         self.repository_process.start()
         self.elts_process.start()
         self.client.force_login(self.user)
+
         with self.assertNumQueries(7):
-            response = self.client.get(reverse('api:process-list'), {'with_workflow': 'false'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+            response = self.client.get(reverse('api:process-list'), {'with_tasks': 'false'})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertDictEqual(response.json(), {
             'count': 3,
             'number': 1,
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(self.training_process),
-                self._serialize_process(self.user_img_process),
-                self._serialize_process(self.workers_process),
+                {
+                    'id': str(self.training_process.id),
+                    'name': None,
+                    'mode': 'training',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.training_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.training_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.user_img_process.id),
+                    'name': None,
+                    'mode': 'files',
+                    'corpus': str(self.private_corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.user_img_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.user_img_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.workers_process.id),
+                    'name': 'Process fixture',
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.workers_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.workers_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
@@ -234,23 +303,31 @@ class TestProcesses(FixtureAPITestCase):
         corpus2 = Corpus.objects.create(name='Another corpus', description='something')
         process2 = corpus2.processes.create(creator=self.user, mode=ProcessMode.Files)
 
-        response = self.client.get(reverse('api:process-list'), {'corpus': str(corpus2.id), 'with_workflow': False})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        with self.assertNumQueries(4):
+            response = self.client.get(reverse('api:process-list'), {'corpus': str(corpus2.id), 'with_tasks': False})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(len(data['results']), 1)
         self.assertEqual(data['results'][0]['id'], str(process2.id))
 
     def test_list_filter_corpus_invalid(self):
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'), {'corpus': 'oh-no'})
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+        with self.assertNumQueries(2):
+            response = self.client.get(reverse('api:process-list'), {'corpus': 'oh-no'})
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {'corpus': ["'oh-no' is not a valid UUID"]})
 
     def test_list_filter_mode(self):
         self.client.force_login(self.user)
         assert Process.objects.exclude(mode=ProcessMode.Files).exists()
-        response = self.client.get(reverse('api:process-list'), {'mode': ProcessMode.Files.value, 'with_workflow': False})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        with self.assertNumQueries(7):
+            response = self.client.get(reverse('api:process-list'), {'mode': ProcessMode.Files.value, 'with_tasks': False})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(len(data['results']), 1)
         self.assertEqual(data['results'][0]['id'], str(self.user_img_process.id))
@@ -260,23 +337,39 @@ class TestProcesses(FixtureAPITestCase):
         Filtering by the local mode shows only the local process of the current user
         """
         self.client.force_login(self.user)
+
         with self.assertNumQueries(7):
             response = self.client.get(reverse('api:process-list'), {'mode': ProcessMode.Local.value})
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertDictEqual(response.json(), {
             'count': 1,
             'number': 1,
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(self.local_process),
+                {
+                    'id': str(self.local_process.id),
+                    'name': None,
+                    'mode': 'local',
+                    'corpus': None,
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.local_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.local_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
     def test_list_filter_unexisting_mode(self):
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'), {'mode': 'unexisting_mode'})
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+        with self.assertNumQueries(2):
+            response = self.client.get(reverse('api:process-list'), {'mode': 'unexisting_mode'})
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {'mode': ["Mode 'unexisting_mode' does not exist"]})
 
     def test_list_filter_created(self):
@@ -284,17 +377,53 @@ class TestProcesses(FixtureAPITestCase):
         Display processes that have been created by the user
         """
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'), {'created': 'true'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        with self.assertNumQueries(8):
+            response = self.client.get(reverse('api:process-list'), {'created': 'true'})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertDictEqual(response.json(), {
             'count': 3,
             'number': 1,
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(self.training_process),
-                self._serialize_process(self.user_img_process),
-                self._serialize_process(self.workers_process),
+                {
+                    'id': str(self.training_process.id),
+                    'name': None,
+                    'mode': 'training',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.training_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.training_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.user_img_process.id),
+                    'name': None,
+                    'mode': 'files',
+                    'corpus': str(self.private_corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.user_img_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.user_img_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.workers_process.id),
+                    'name': 'Process fixture',
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.workers_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.workers_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
@@ -303,8 +432,11 @@ class TestProcesses(FixtureAPITestCase):
         Display processes that have not been created by the user
         """
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'), {'created': 'false'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        with self.assertNumQueries(8):
+            response = self.client.get(reverse('api:process-list'), {'created': 'false'})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(data['count'], 2)
         self.assertCountEqual(
@@ -325,8 +457,11 @@ class TestProcesses(FixtureAPITestCase):
             creator=self.user,
             mode=ProcessMode.Files,
         )
-        response = self.client.get(reverse('api:process-list'), {'id': process_id[:10], 'with_workflow': False})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        with self.assertNumQueries(7):
+            response = self.client.get(reverse('api:process-list'), {'id': process_id[:10], 'with_tasks': False})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(len(data['results']), 1)
         self.assertEqual(data['results'][0]['id'], str(process2.id))
@@ -334,143 +469,139 @@ class TestProcesses(FixtureAPITestCase):
     def test_list_filter_name(self):
         self.client.force_login(self.user)
 
-        process_id_1 = str(uuid.uuid4())
         process1 = self.corpus.processes.create(
-            id=process_id_1,
             creator=self.user,
             mode=ProcessMode.Workers,
             name='Number One'
         )
 
-        process_id_2 = str(uuid.uuid4())
         self.corpus.processes.create(
-            id=process_id_2,
             creator=self.user,
             mode=ProcessMode.Workers,
             name='Numero Duo'
         )
 
-        response = self.client.get(reverse('api:process-list'), {'name': 'Numb', 'with_workflow': False})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        data = response.json()
-        self.assertEqual(len(data['results']), 1)
-        self.assertEqual(data['results'][0]['id'], str(process1.id))
-
-    @override_settings(IMPORTS_WORKER_VERSION=None)
-    def test_list_no_tasks(self):
-        """
-        Ensure the Process reports an Unscheduled state when there are no tasks in its workflow
-        """
-        self.assertIsNone(self.user_img_process.workflow)
-        with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
-            self.user_img_process.start()
-        self.user_img_process.workflow.tasks.all().delete()
-
-        self.client.force_login(self.user)
-        with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
-            response = self.client.get(reverse('api:process-list'), {'id': str(self.user_img_process.id)})
+        with self.assertNumQueries(7):
+            response = self.client.get(reverse('api:process-list'), {'name': 'Numb', 'with_tasks': False})
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(len(data['results']), 1)
-        self.assertEqual(data['results'][0]['state'], State.Unscheduled.value)
+        self.assertEqual(data['results'][0]['id'], str(process1.id))
 
     def test_list_process_filter_wrong_state_value(self):
         self.client.force_login(self.user)
-        response = self.client.get(reverse('api:process-list'), {'state': 'spain'})
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+        with self.assertNumQueries(2):
+            response = self.client.get(reverse('api:process-list'), {'state': 'spain'})
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertDictEqual(response.json(), {'state': ["State 'spain' does not exist"]})
 
     def test_list_process_state_filter(self):
         """
         Only the last run should be used to filter processes by state
         """
-        workflows = [uuid.uuid4() for i in range(3)]
-        for task in (
-            (workflows[0], 0, State.Failed),
-            (workflows[0], 1, State.Completed),
+        completed_process = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        completed_process.tasks.create(depth=0, run=0, slug='task', state=State.Failed)
+        completed_process.tasks.create(depth=0, run=1, slug='task', state=State.Completed)
+        self.assertEqual(completed_process.state, State.Completed)
 
-            (workflows[1], 0, State.Error),
-            (workflows[1], 0, State.Completed),
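+        # Both tasks are in run 0: an Error task should outweigh a Completed one within the same run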
+        error_process = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        error_process.tasks.create(depth=0, run=0, slug='error_task', state=State.Error)
+        error_process.tasks.create(depth=0, run=0, slug='completed_task', state=State.Completed)
+        self.assertEqual(error_process.state, State.Error)
 
-            (workflows[2], 0, State.Completed),
-            (workflows[2], 1, State.Completed),
-            (workflows[2], 2, State.Stopped),
-        ):
-            self.build_task(*task)
-        processes = [Process.objects.get(workflow_id=w) for w in workflows]
-        # Only first process is completed
-        self.assertListEqual([p for p in processes if p.state.value == 'completed'], [processes[0]])
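+        # Only run 2, the last run, should be considered here, so the process ends up Stopped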
+        stopped_process = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        stopped_process.tasks.create(depth=0, run=0, slug='completed_task', state=State.Completed)
+        stopped_process.tasks.create(depth=0, run=1, slug='completed_task', state=State.Completed)
+        stopped_process.tasks.create(depth=0, run=2, slug='stopped_task', state=State.Stopped)
+        self.assertEqual(stopped_process.state, State.Stopped)
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(9):
+        with self.assertNumQueries(8):
             response = self.client.get(reverse('api:process-list'), {'state': 'completed'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        self.assertCountEqual([r['id'] for r in response.json()['results']], [str(processes[0].id)])
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.assertCountEqual([r['id'] for r in response.json()['results']], [str(completed_process.id)])
 
     def test_list_process_state_filter_stopped(self):
-        workflows = [uuid.uuid4() for i in range(4)]
-        for task in (
-            # Should be stopping
-            (workflows[0], 0, State.Error),
-            (workflows[0], 1, State.Stopping),
-            # Should be in error state
-            (workflows[1], 0, State.Error),
-            (workflows[1], 0, State.Stopping),
-            # Should be stopped
-            (workflows[2], 0, State.Unscheduled),
-            (workflows[2], 0, State.Stopped),
-            # Should be stopped
-            (workflows[3], 0, State.Failed),
-            (workflows[3], 1, State.Pending),
-            (workflows[3], 1, State.Stopped),
-        ):
-            self.build_task(*task)
-        processes = [Process.objects.get(workflow_id=w) for w in workflows]
-        self.assertListEqual([p for p in processes if p.state.value == 'stopped'], [processes[2], processes[3]])
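+        # A Stopping task in the last run should make the whole process report Stopping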
+        stopping_process = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        stopping_process.tasks.create(depth=0, run=0, slug='task', state=State.Error)
+        stopping_process.tasks.create(depth=0, run=1, slug='task', state=State.Stopping)
+        self.assertEqual(stopping_process.state, State.Stopping)
+
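+        # Within the same run, Error should take precedence over Stopping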
+        error_process = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        error_process.tasks.create(depth=0, run=0, slug='error_task', state=State.Error)
+        error_process.tasks.create(depth=0, run=0, slug='stopping_task', state=State.Stopping)
+        self.assertEqual(error_process.state, State.Error)
+
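+        # Within the same run, Stopped should take precedence over Unscheduled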
+        stopped_process_1 = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        stopped_process_1.tasks.create(depth=0, run=0, slug='unscheduled_task', state=State.Unscheduled)
+        stopped_process_1.tasks.create(depth=0, run=0, slug='stopped_task', state=State.Stopped)
+        self.assertEqual(stopped_process_1.state, State.Stopped)
+
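+        # In the last run, a Stopped task should outweigh a Pending one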
+        stopped_process_2 = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        stopped_process_2.tasks.create(depth=0, run=0, slug='failed_task', state=State.Failed)
+        stopped_process_2.tasks.create(depth=0, run=1, slug='pending_task', state=State.Pending)
+        stopped_process_2.tasks.create(depth=0, run=1, slug='stopped_task', state=State.Stopped)
+        self.assertEqual(stopped_process_2.state, State.Stopped)
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(9):
+        with self.assertNumQueries(8):
             response = self.client.get(reverse('api:process-list'), {'state': 'stopped'})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertCountEqual(
             [r['id'] for r in response.json()['results']],
-            [str(processes[2].id), str(processes[3].id)]
+            [str(stopped_process_1.id), str(stopped_process_2.id)]
         )
 
     def test_list_process_state_filter_unscheduled(self):
         """
         Processes with no tasks should be interpreted as unscheduled
         """
-        workflow_id = uuid.uuid4()
-        self.build_task(workflow_id, 0, State.Completed)
-        self.build_task(workflow_id, 1, State.Unscheduled)
-        unscheduled_with_workflow = Process.objects.get(workflow_id=workflow_id)
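+        # An Unscheduled task in the last run should make the whole process Unscheduled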
+        unscheduled_with_tasks = Process.objects.create(mode=ProcessMode.Workers, creator=self.user, corpus=self.corpus)
+        unscheduled_with_tasks.tasks.create(depth=0, run=0, slug='task', state=State.Completed)
+        unscheduled_with_tasks.tasks.create(depth=0, run=1, slug='task', state=State.Unscheduled)
+        self.assertEqual(unscheduled_with_tasks.state, State.Unscheduled)
 
-        # Workflows with no tasks should be considered unscheduled
-        self.assertIsNone(self.user_img_process.workflow_id)
+        self.assertFalse(self.user_img_process.tasks.exists())
         self.assertEqual(self.user_img_process.state, State.Unscheduled)
-        self.assertIsNone(self.repository_process.workflow_id)
+        self.assertFalse(self.repository_process.tasks.exists())
         self.assertEqual(self.repository_process.state, State.Unscheduled)
-        self.assertIsNone(self.elts_process.workflow_id)
+        self.assertFalse(self.elts_process.tasks.exists())
         self.assertEqual(self.elts_process.state, State.Unscheduled)
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(9):
-            response = self.client.get(reverse('api:process-list'), {'state': 'unscheduled', 'with_workflow': True})
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        with self.assertNumQueries(8):
+            response = self.client.get(reverse('api:process-list'), {'state': 'unscheduled', 'with_tasks': True})
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertDictEqual(response.json(), {
             'count': 1,
             'number': 1,
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(unscheduled_with_workflow)
+                {
+                    'id': str(unscheduled_with_tasks.id),
+                    'name': None,
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': unscheduled_with_tasks.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': unscheduled_with_tasks.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
-        self.client.force_login(self.user)
         with self.assertNumQueries(4):
-            response = self.client.get(reverse('api:process-list'), {'state': 'unscheduled', 'with_workflow': False})
+            response = self.client.get(reverse('api:process-list'), {'state': 'unscheduled', 'with_tasks': False})
+
         self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.assertDictEqual(response.json(), {
             'count': 5,
@@ -478,11 +609,66 @@ class TestProcesses(FixtureAPITestCase):
             'next': None,
             'previous': None,
             'results': [
-                self._serialize_process(self.training_process),
-                self._serialize_process(self.elts_process),
-                self._serialize_process(self.repository_process),
-                self._serialize_process(self.user_img_process),
-                self._serialize_process(self.workers_process),
+                {
+                    'id': str(self.training_process.id),
+                    'name': None,
+                    'mode': 'training',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.training_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.training_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.elts_process.id),
+                    'name': None,
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.elts_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.elts_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.repository_process.id),
+                    'name': None,
+                    'mode': 'repository',
+                    'corpus': None,
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.repository_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.repository_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.user_img_process.id),
+                    'name': None,
+                    'mode': 'files',
+                    'corpus': str(self.private_corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.user_img_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.user_img_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
+                {
+                    'id': str(self.workers_process.id),
+                    'name': 'Process fixture',
+                    'mode': 'workers',
+                    'corpus': str(self.corpus.id),
+                    'state': 'unscheduled',
+                    'activity_state': 'disabled',
+                    'created': self.workers_process.created.isoformat().replace('+00:00', 'Z'),
+                    'updated': self.workers_process.updated.isoformat().replace('+00:00', 'Z'),
+                    'started': None,
+                    'finished': None,
+                },
             ]
         })
 
@@ -492,44 +678,72 @@ class TestProcesses(FixtureAPITestCase):
         self.assertDictEqual(response.json(), {'detail': 'Authentication credentials were not provided.'})
 
     @override_settings(IMPORTS_WORKER_VERSION=None)
-    def test_details_no_tasks(self):
-        """
-        Ensure the Process reports an Unscheduled state when there are no tasks in its workflow
-        """
-        self.assertIsNone(self.user_img_process.workflow)
-        with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
-            self.user_img_process.start()
-        self.user_img_process.workflow.tasks.all().delete()
-        # A user needs access to a process's corpus to get this process's details
-        self.private_corpus.memberships.create(user=self.user, level=Role.Guest.value)
-        self.client.force_login(self.user)
-        with self.assertNumQueries(7):
-            response = self.client.get(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        data = response.json()
-        self.assertEqual(data['state'], State.Unscheduled.value)
-
     def test_details(self):
         self.client.force_login(self.user)
         # A user needs access to a process's corpus to get this process's details
         self.private_corpus.memberships.create(user=self.user, level=Role.Guest.value)
-        with self.assertNumQueries(6):
+        # Have the endpoint return a task too
+        with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
+            self.user_img_process.start()
+        task = self.user_img_process.tasks.get()
+
+        with self.assertNumQueries(8):
             response = self.client.get(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-        data = response.json()
-        self.assertEqual(data['id'], str(self.user_img_process.id))
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.assertEqual(response.json(), {
+            'activity_state': ActivityState.Disabled.value,
+            'corpus': str(self.private_corpus.id),
+            'element': None,
+            'element_name_contains': None,
+            'element_type': None,
+            'farm': {
+                'id': str(self.farm.id),
+                'name': 'Default farm',
+            },
+            'files': [],
+            'folder_type': None,
+            'id': str(self.user_img_process.id),
+            'load_children': False,
+            'mode': 'files',
+            'model_id': None,
+            'name': None,
+            'revision': None,
+            'state': 'unscheduled',
+            'tasks': [
+                {
+                    'id': str(task.id),
+                    'depth': 0,
+                    'parents': [],
+                    'run': 0,
+                    'shm_size': None,
+                    'slug': 'import',
+                    'state': 'unscheduled',
+                    'tags': [],
+                    'url': 'http://testserver' + reverse('api:task-details', kwargs={'pk': task.id}),
+                }
+            ],
+            'template_id': None,
+            'test_folder_id': None,
+            'train_folder_id': None,
+            'use_cache': False,
+            'use_gpu': False,
+            'validation_folder_id': None,
+        })
 
     def test_details_training_process(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.get(reverse('api:process-details', kwargs={'pk': self.training_process.id}))
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.assertEqual(response.json(), {
             'activity_state': ActivityState.Disabled.value,
             'corpus': str(self.corpus.id),
             'element': None,
             'element_name_contains': None,
             'element_type': None,
+            'farm': None,
             'files': [],
             'folder_type': None,
             'id': str(self.training_process.id),
@@ -539,15 +753,20 @@ class TestProcesses(FixtureAPITestCase):
             'name': None,
             'revision': None,
             'state': 'unscheduled',
+            'tasks': [],
             'template_id': None,
             'test_folder_id': str(self.test.id),
             'train_folder_id': str(self.train.id),
             'use_cache': False,
             'use_gpu': False,
             'validation_folder_id': str(self.val.id),
-            'workflow': None,
         })
 
+    def test_delete_requires_login(self):
+        response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
+        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+        self.assertDictEqual(response.json(), {'detail': 'Authentication credentials were not provided.'})
+
     def test_delete_requires_verified(self):
         user = User.objects.create(email='AAAAAAA')
         self.client.force_login(user)
@@ -561,10 +780,10 @@ class TestProcesses(FixtureAPITestCase):
         """
         self.client.force_login(self.user)
         self.elts_process.start()
-        self.elts_process.workflow.tasks.update(state=State.Running)
+        self.elts_process.tasks.update(state=State.Running)
         response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
-        self.assertDictEqual(response.json(), {'__all__': ['Cannot delete a workflow while it is running']})
+        self.assertDictEqual(response.json(), {'__all__': ['Cannot delete a process while it is running']})
 
     def test_delete_activities_pending(self):
         """
@@ -606,9 +825,9 @@ class TestProcesses(FixtureAPITestCase):
         If no activities exist for this process, it is deleted directly.
         """
         self.client.force_login(self.user)
-        with self.assertNumQueries(13):
+        with self.assertNumQueries(15):
             response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+            self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
         with self.assertRaises(Process.DoesNotExist):
             self.elts_process.refresh_from_db()
 
@@ -617,9 +836,9 @@ class TestProcesses(FixtureAPITestCase):
         A superuser is allowed to delete any process
         """
         self.client.force_login(self.superuser)
-        with self.assertNumQueries(10):
+        with self.assertNumQueries(12):
             response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
+            self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
         with self.assertRaises(Process.DoesNotExist):
             self.user_img_process.refresh_from_db()
 
@@ -633,9 +852,9 @@ class TestProcesses(FixtureAPITestCase):
             element=self.corpus.elements.get(name='Volume 1'),
             worker_version=WorkerVersion.objects.get(worker__slug='reco'),
         )
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
+            self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
         self.assertEqual(delay_mock.call_count, 1)
         (process_arg,), kwargs = delay_mock.call_args
         self.assertEqual(process_arg.id, self.elts_process.id)
@@ -663,7 +882,7 @@ class TestProcesses(FixtureAPITestCase):
             worker_version=WorkerVersion.objects.get(worker__slug='reco'),
             state=WorkerActivityState.Processed,
         )
-        with self.assertNumQueries(11):
+        with self.assertNumQueries(13):
             response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
             self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
         with self.assertRaises(Process.DoesNotExist):
@@ -673,50 +892,54 @@ class TestProcesses(FixtureAPITestCase):
         with self.assertRaises(WorkerActivity.DoesNotExist):
             completed_activity.refresh_from_db()
 
-    def test_update_process_requires_login(self):
-        response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
+    def test_partial_update_requires_login(self):
+        response = self.client.patch(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertDictEqual(response.json(), {'detail': 'Authentication credentials were not provided.'})
 
-    def test_update_process_requires_verified(self):
+    def test_partial_update_requires_verified(self):
         user = User.objects.create(email="unchecked")
         self.client.force_login(user)
-        response = self.client.delete(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
+        response = self.client.patch(reverse('api:process-details', kwargs={'pk': self.user_img_process.id}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertDictEqual(response.json(), {'detail': 'You do not have permission to perform this action.'})
 
-    def test_update_process_running(self):
+    def test_partial_update_running(self):
         """
         A running process cannot be updated... (see the following test)
         """
         self.client.force_login(self.user)
         self.elts_process.start()
-        self.elts_process.workflow.tasks.update(state=State.Running)
-        with self.assertNumQueries(7):
-            response = self.client.patch(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
-        self.assertDictEqual(response.json(), {'__all__': ['Cannot edit a workflow while it is running']})
+        self.elts_process.tasks.update(state=State.Running)
+        with self.assertNumQueries(9):
+            response = self.client.patch(
+                reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                {'element_name_contains': 'something'},
+                format='json'
+            )
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(response.json(), {'__all__': ['Cannot edit a process while it is running']})
 
-    def test_update_process_running_only_name(self):
+    def test_partial_update_running_only_name(self):
         """
         Only the name attribute of a running process can be patched
         """
         self.client.force_login(self.user)
         self.elts_process.start()
-        self.elts_process.workflow.tasks.update(state=State.Running)
+        self.elts_process.tasks.update(state=State.Running)
         self.assertEqual(self.elts_process.name, None)
-        with self.assertNumQueries(9):
+        with self.assertNumQueries(14):
             response = self.client.patch(
                 reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
                 {'name': 'newName'},
                 format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
         self.elts_process.refresh_from_db()
         self.assertEqual(self.elts_process.name, 'newName')
 
     @override_settings(IMPORTS_WORKER_VERSION=None)
-    def test_update_file_import_element(self):
+    def test_partial_update_file_import_element(self):
         """
         A file import's element can be updated even while it is running
         """
@@ -724,22 +947,24 @@ class TestProcesses(FixtureAPITestCase):
         process = self.corpus.processes.create(mode=ProcessMode.Files, creator=self.user)
         with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             process.start()
-            process.workflow.tasks.update(state=State.Running)
+            process.tasks.update(state=State.Running)
         self.assertIsNone(self.elts_process.element)
 
         # Take any element with an image: this will cause extra queries to retrieve the Image and ImageServer
         element = self.corpus.elements.exclude(image_id=None).first()
-        with (self.assertNumQueries(13), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id))):
+
+        with self.assertNumQueries(15), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             response = self.client.patch(
                 reverse('api:process-details', kwargs={'pk': process.id}),
                 {'element_id': str(element.id)},
                 format='json'
             )
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         process.refresh_from_db()
         self.assertEqual(process.element, element)
 
-    def test_update_process_no_permission(self):
+    def test_partial_update_no_permission(self):
         """
         A user cannot update a process linked to a corpus they do not have admin access to
         """
@@ -749,7 +974,7 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertDictEqual(response.json(), {'detail': 'You do not have a sufficient access level to this process.'})
 
-    def test_update_process_repository_requires_admin(self):
+    def test_partial_update_repository_requires_admin(self):
         """
         Editing a repository import requires admin rights on the repository
         """
@@ -761,7 +986,107 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertDictEqual(response.json(), {'detail': 'You do not have a sufficient access level to this process.'})
 
-    def test_update_process_wrong_mode(self):
+    def test_partial_update_stop(self):
+        """
+        A pending or running process can be stopped
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+
+        for state in [State.Pending, State.Running]:
+            with self.subTest(state=state):
+                self.elts_process.tasks.update(state=state)
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, state)
+
+                with self.assertNumQueries(16):
+                    response = self.client.patch(
+                        reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                        {'state': 'stopping'},
+                        format='json',
+                    )
+                    self.assertEqual(response.status_code, status.HTTP_200_OK)
+
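+                # Agents still have to actually stop these tasks, so the process stays in Stopping with no end date yet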
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, State.Stopping)
+                self.assertIsNone(self.elts_process.finished)
+
+    def test_partial_update_unscheduled_stop(self):
+        """
+        An unscheduled process can be stopped
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+        self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+        with self.assertNumQueries(17):
+            response = self.client.patch(
+                reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                {'state': 'stopping'},
+                format='json',
+            )
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.elts_process.refresh_from_db()
+        # Since no agents need to do anything to stop the tasks, all the tasks are immediately marked as Stopped
+        self.assertEqual(self.elts_process.state, State.Stopped)
+        # Since everything is stopped, `finished` should be set
+        self.assertIsNotNone(self.elts_process.finished)
+
+    def test_partial_update_state_only_stopping(self):
+        """
+        A process's state can only be updated to `stopping`
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+        self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+        for state in set(State) - {State.Stopping}:
+            with self.subTest(state=state):
+                with self.assertNumQueries(9):
+                    response = self.client.patch(
+                        reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                        {'state': state.value},
+                        format='json',
+                    )
+                    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+                self.assertDictEqual(response.json(), {
+                    'state': ["Can only change the state to 'stopping'"],
+                })
+
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+    def test_partial_update_state_non_final(self):
+        """
+        Only a process in an unscheduled, pending or running state can be updated to `stopping`
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+
+        for state in set(State) - {State.Unscheduled, State.Pending, State.Running}:
+            with self.subTest(state=state):
+                self.elts_process.tasks.update(state=state)
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, state)
+
+                with self.assertNumQueries(9):
+                    response = self.client.patch(
+                        reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                        {'state': 'stopping'},
+                        format='json',
+                    )
+                    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+                self.assertDictEqual(response.json(), {
+                    'state': [f'Cannot stop a {state.value} process'],
+                })
+
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, state)
+
+    def test_update_wrong_mode(self):
         """
         Processes that are not of Workers mode cannot be updated, except the 'name' field
         """
@@ -799,7 +1124,7 @@ class TestProcesses(FixtureAPITestCase):
                     {'__all__': ['Only processes of mode Workers can be updated']}
                 )
 
-    def test_update_process_repository(self):
+    def test_update_repository(self):
         """
         Trying to update any field other than the `name` field on a Repository process is not allowed
         """
@@ -827,7 +1152,103 @@ class TestProcesses(FixtureAPITestCase):
             {'element_type': ['Object with slug=page does not exist.']},
         )
 
-    def test_update_process_wrong_mode_patch(self):
+    def test_update_unscheduled_stop(self):
+        """
+        An unscheduled process can be stopped
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+        self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+        with self.assertNumQueries(18):
+            response = self.client.put(
+                reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                {
+                    'name': 'newName',
+                    'template_id': None,
+                    'element_name_contains': 'AAA',
+                    'element_type': 'page',
+                    'load_children': True,
+                    'state': 'stopping',
+                },
+                format='json',
+            )
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.elts_process.refresh_from_db()
+        # Since no agents need to do anything to stop the tasks, all the tasks are immediately marked as Stopped
+        self.assertEqual(self.elts_process.state, State.Stopped)
+        # Since everything is stopped, `finished` should be set
+        self.assertIsNotNone(self.elts_process.finished)
+
+    def test_update_state_only_stopping(self):
+        """
+        A process's state can only be updated to `stopping`
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+        self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+        for state in set(State) - {State.Stopping}:
+            with self.subTest(state=state):
+                with self.assertNumQueries(10):
+                    response = self.client.put(
+                        reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                        {
+                            'name': 'newName',
+                            'template_id': None,
+                            'element_name_contains': 'AAA',
+                            'element_type': 'page',
+                            'load_children': True,
+                            'state': state.value,
+                        },
+                        format='json',
+                    )
+                    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+                self.assertDictEqual(response.json(), {
+                    'state': ["Can only change the state to 'stopping'"],
+                })
+
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, State.Unscheduled)
+
+    def test_update_state_non_final(self):
+        """
+        Only a process in an unscheduled, pending or running state can be updated to `stopping`
+        """
+        self.client.force_login(self.user)
+        self.elts_process.start()
+
+        for state in set(State) - {State.Unscheduled, State.Pending, State.Running}:
+            with self.subTest(state=state):
+                self.elts_process.tasks.update(state=state)
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, state)
+
+                with self.assertNumQueries(10):
+                    response = self.client.put(
+                        reverse('api:process-details', kwargs={'pk': self.elts_process.id}),
+                        {
+                            'name': 'newName',
+                            'template_id': None,
+                            'element_name_contains': 'AAA',
+                            'element_type': 'page',
+                            'load_children': True,
+                            'state': 'stopping',
+                        },
+                        format='json',
+                    )
+                    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+                self.assertDictEqual(response.json(), {
+                    'state': [f'Cannot stop a {state.value} process'],
+                })
+
+                self.elts_process.refresh_from_db()
+                self.assertEqual(self.elts_process.state, state)
+
+    def test_partial_update_wrong_mode(self):
         """
         Fields other than 'name' cannot be updated in processes that are not of Workers mode
         """
@@ -852,7 +1273,7 @@ class TestProcesses(FixtureAPITestCase):
 
                 response = self.client.patch(
                     reverse('api:process-details', kwargs={'pk': process.id}),
-                    {},
+                    {'element_name_contains': 'something'},
                     format='json',
                 )
                 self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@@ -861,7 +1282,7 @@ class TestProcesses(FixtureAPITestCase):
                     {'__all__': ['Only processes of mode Workers can be updated']}
                 )
 
-    def test_update_process_only_name(self):
+    def test_partial_update_only_name(self):
         """
         Whatever the process mode, except Local, the name attribute (alone) can be updated
         """
@@ -895,14 +1316,14 @@ class TestProcesses(FixtureAPITestCase):
                 process.refresh_from_db()
                 self.assertEqual(process.name, 'newName')
 
-    def test_update_process_corpus_no_write_right(self):
+    def test_partial_update_corpus_no_write_right(self):
         self.client.force_login(self.user)
         self.corpus.memberships.filter(user=self.user).update(level=Role.Guest.value)
         response = self.client.patch(reverse('api:process-details', kwargs={'pk': self.elts_process.id}))
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
         self.assertDictEqual(response.json(), {'detail': 'You do not have a sufficient access level to this process.'})
 
-    def test_update_process(self):
+    def test_partial_update(self):
         """
         Process fields related to elements filtering can be updated
         """
@@ -912,7 +1333,8 @@ class TestProcesses(FixtureAPITestCase):
             (process.name, process.name_contains, process.element_type, process.load_children),
             (None, None, None, False)
         )
-        with self.assertNumQueries(9):
+
+        with self.assertNumQueries(12):
             response = self.client.patch(
                 reverse('api:process-details', kwargs={'pk': process.id}),
                 {
@@ -923,14 +1345,15 @@ class TestProcesses(FixtureAPITestCase):
                 },
                 format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         process.refresh_from_db()
         self.assertTupleEqual(
             (process.name, process.name_contains, process.element_type, process.load_children),
             ('newName', 'AAA', self.page_type, True)
         )
 
-    def test_update_process_reset_fields(self):
+    def test_partial_update_reset_fields(self):
         self.client.force_login(self.user)
         process = Process.objects.create(
             mode=ProcessMode.Workers,
@@ -958,7 +1381,7 @@ class TestProcesses(FixtureAPITestCase):
             (None, None, None, False)
         )
 
-    def test_update_process_wrong_type(self):
+    def test_partial_update_wrong_type(self):
         self.client.force_login(self.user)
         process = Process.objects.create(mode=ProcessMode.Workers, corpus=self.corpus, creator=self.user)
         response = self.client.patch(
@@ -972,7 +1395,7 @@ class TestProcesses(FixtureAPITestCase):
             {'element_type': ['Object with slug=unexisting_type does not exist.']}
         )
 
-    def test_update_process_fields(self):
+    def test_partial_update_fields(self):
         """
         Ensure specific fields can be updated on a non-started workers process.
         Editable fields are `name`, `name_contains`, `element_type`, and `load_children`.
@@ -990,7 +1413,6 @@ class TestProcesses(FixtureAPITestCase):
                 'files': [1, 2, 3],
                 'mode': 'iiif',
                 'folder_type': 'register',
-                'state': 'success',
                 'name': 'newName',
                 'element_name_contains': 'AAA',
                 'element_type': 'page',
@@ -1017,17 +1439,19 @@ class TestProcesses(FixtureAPITestCase):
             'mode': 'workers',
             'revision': None,
             'state': 'unscheduled',
-            'workflow': None,
             'template_id': None,
             'model_id': None,
             'train_folder_id': None,
             'validation_folder_id': None,
             'test_folder_id': None,
+            'farm': None,
+            'tasks': [],
         })
 
     def test_retry_requires_login(self):
-        response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.user_img_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+        with self.assertNumQueries(0):
+            response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.user_img_process.id}))
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
 
     def test_retry_repository_process_no_right(self):
         """
@@ -1035,67 +1459,82 @@ class TestProcesses(FixtureAPITestCase):
         """
         self.client.force_login(self.user)
         self.repository_process.start()
-        self.repository_process.workflow.tasks.all().update(state=State.Error)
+        self.repository_process.tasks.all().update(state=State.Error)
         self.assertEqual(self.repository_process.state, State.Error)
-        response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.repository_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
+        with self.assertNumQueries(9):
+            response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.repository_process.id}))
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
         self.assertDictEqual(response.json(), {'detail': 'You do not have an admin access to this process.'})
 
     def test_retry_only_final(self):
         self.client.force_login(self.user)
         self.elts_process.start()
         parameters = [
-            (State.Unscheduled, 'This workflow is already pending'),
-            (State.Pending, 'This workflow is already pending'),
-            (State.Running, 'This workflow is already running'),
-            (State.Stopping, 'This workflow is stopping'),
+            (State.Unscheduled, 'This process is already pending'),
+            (State.Pending, 'This process is already pending'),
+            (State.Running, 'This process is already running'),
+            (State.Stopping, 'This process is stopping'),
         ]
         for state, message in parameters:
             with self.subTest(state=state):
-                self.elts_process.workflow.tasks.all().update(state=state)
+                self.elts_process.tasks.all().update(state=state)
                 self.assertEqual(self.elts_process.state, state)
-                with self.assertNumQueries(7):
+                with self.assertNumQueries(9):
                     response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.elts_process.id}))
-                self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+                    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
                 self.assertDictEqual(response.json(), {'__all__': [message]})
 
     @patch('arkindex.project.triggers.process_tasks.initialize_activity.delay')
     def test_retry(self, delay_mock):
         self.client.force_login(self.user)
         self.elts_process.start()
-        self.elts_process.workflow.tasks.all().update(state=State.Error)
+        self.elts_process.tasks.all().update(state=State.Error)
         self.assertEqual(self.elts_process.state, State.Error)
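+        # Mark the process as finished to check that retrying clears the end date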
+        self.elts_process.finished = timezone.now()
+        self.elts_process.save()
+
         with self.assertNumQueries(12):
             response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.elts_process.id}))
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.elts_process.refresh_from_db()
         self.assertEqual(self.elts_process.state, State.Unscheduled)
+        # `finished` is unset
+        self.assertIsNone(self.elts_process.finished)
+        # Activity initialization is not triggered again
         self.assertFalse(delay_mock.called)
 
     @patch('arkindex.project.triggers.process_tasks.initialize_activity.delay')
-    def test_retry_no_workflow(self, delay_mock):
+    def test_retry_no_tasks(self, delay_mock):
         self.client.force_login(self.user)
-        self.assertIsNone(self.elts_process.workflow)
+        self.assertFalse(self.elts_process.tasks.exists())
+
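+        # Retrying a process that has no tasks yet should build and start it from scratch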
         with self.assertNumQueries(15):
             response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.elts_process.id}))
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         self.elts_process.refresh_from_db()
         self.assertEqual(self.elts_process.state, State.Unscheduled)
-        self.assertIsNotNone(self.elts_process.workflow)
+        self.assertTrue(self.elts_process.tasks.exists())
         self.assertFalse(delay_mock.called)
 
     def test_retry_repo_disabled(self):
         self.repository_process.revision = self.rev
         self.repository_process.save()
         self.repository_process.start()
-        self.repository_process.workflow.tasks.all().update(state=State.Error)
+        self.repository_process.tasks.all().update(state=State.Error)
         self.assertEqual(self.repository_process.state, State.Error)
         self.creds.delete()
         # Allow the user to do the retry
         self.repo.memberships.filter(user=self.user).update(level=Role.Admin.value)
         self.client.force_login(self.user)
-        response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.repository_process.id}))
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+        with self.assertNumQueries(8):
+            response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.repository_process.id}))
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), ['Git repository does not have any valid credentials'])
 
     @patch('arkindex.project.triggers.process_tasks.initialize_activity.delay')
@@ -1105,13 +1544,13 @@ class TestProcesses(FixtureAPITestCase):
         """
         self.client.force_login(self.user)
         self.workers_process.start()
-        self.workers_process.workflow.tasks.all().update(state=State.Error)
+        self.workers_process.tasks.all().update(state=State.Error)
         self.assertEqual(self.workers_process.state, State.Error)
 
         self.workers_process.activity_state = ActivityState.Error
         self.workers_process.save()
 
-        with self.assertNumQueries(21):
+        with self.assertNumQueries(20):
             response = self.client.post(reverse('api:process-retry', kwargs={'pk': self.workers_process.id}))
             self.assertEqual(response.status_code, status.HTTP_200_OK)
 
@@ -1146,7 +1585,7 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(process.folder_type.slug, 'volume')
         self.assertEqual(process.element_type.slug, 'page')
         self.assertEqual(self.version_with_model.worker_runs.count(), 1)
-        import_task = process.workflow.tasks.get(slug="import")
+        import_task = process.tasks.get(slug="import")
         self.assertEqual(
             import_task.env["ARKINDEX_WORKER_RUN_ID"],
             str(self.version_with_model.worker_runs.get().id),
@@ -1193,7 +1632,7 @@ class TestProcesses(FixtureAPITestCase):
     def test_from_files_iiif(self):
         self.client.force_login(self.user)
 
-        with self.assertNumQueries(30), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
+        with self.assertNumQueries(29), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
             response = self.client.post(reverse('api:files-process'), {
                 'files': [str(self.iiif_df.id)],
                 'mode': 'iiif',
@@ -1220,7 +1659,7 @@ class TestProcesses(FixtureAPITestCase):
         self.assertIsNone(worker_run.configuration_id)
         self.assertIsNone(worker_run.model_version_id)
 
-        self.assertIsNotNone(process.workflow)
+        self.assertTrue(process.tasks.exists())
 
     @override_settings(IMPORTS_WORKER_VERSION=None)
     def test_from_files_iiif_with_json_charset(self):
@@ -1369,9 +1808,8 @@ class TestProcesses(FixtureAPITestCase):
 
     def test_start_process_process_already_started(self):
         process2 = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
-        process2.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        process2.save()
-        self.assertIsNotNone(process2.workflow)
+        process2.start()
+        self.assertTrue(process2.tasks.exists())
 
         self.client.force_login(self.user)
         response = self.client.post(
@@ -1403,19 +1841,18 @@ class TestProcesses(FixtureAPITestCase):
         run = process2.worker_runs.create(version=self.version_with_model, parents=[], configuration=None)
         run.model_version = self.model_version_1
         run.save()
-        self.assertIsNone(process2.workflow)
+        self.assertFalse(process2.tasks.exists())
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process2.id)})
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         self.assertEqual(response.json()['id'], str(process2.id))
 
     def test_start_process_empty(self):
         process2 = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
-        self.assertIsNone(process2.workflow)
 
         self.client.force_login(self.user)
         with self.assertNumQueries(7):
@@ -1423,6 +1860,7 @@ class TestProcesses(FixtureAPITestCase):
                 reverse('api:process-start', kwargs={'pk': str(process2.id)})
             )
             self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(
             response.json(),
             {'__all__': ['The process must either use thumbnail generation or have worker runs.']},
@@ -1433,7 +1871,7 @@ class TestProcesses(FixtureAPITestCase):
         process2.worker_runs.create(version=self.recognizer, parents=[], configuration=None)
         self.recognizer.state = WorkerVersionState.Error
         self.recognizer.save()
-        self.assertIsNone(process2.workflow)
+        self.assertFalse(process2.tasks.exists())
 
         self.client.force_login(self.user)
         with self.assertNumQueries(7):
@@ -1443,7 +1881,7 @@ class TestProcesses(FixtureAPITestCase):
             self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertEqual(
             response.json(),
-            {'version': 'This process contains one or more unavailable worker versions and cannot be started.'},
+            {'version': ['This process contains one or more unavailable worker versions and cannot be started.']},
         )
 
     def test_start_process_unavailable_model_version(self):
@@ -1451,7 +1889,7 @@ class TestProcesses(FixtureAPITestCase):
         process2.worker_runs.create(version=self.recognizer, parents=[], configuration=None, model_version=self.model_version_1)
         self.model_version_1.state = ModelVersionState.Error
         self.model_version_1.save()
-        self.assertIsNone(process2.workflow)
+        self.assertFalse(process2.tasks.exists())
 
         self.client.force_login(self.user)
         with self.assertNumQueries(7):
@@ -1471,45 +1909,49 @@ class TestProcesses(FixtureAPITestCase):
         """
         process2 = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
         process2.worker_runs.create(version=self.recognizer, parents=[], configuration=None)
-        self.assertIsNone(process2.workflow)
+        self.assertFalse(process2.tasks.exists())
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process2.id)})
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
         self.assertEqual(response.json()['id'], str(process2.id))
         process2.refresh_from_db()
         self.assertEqual(process2.state, State.Unscheduled)
-        self.assertIsNotNone(process2.workflow)
         # Ensure default parameters are used
-        self.assertEqual(process2.workflow.farm_id, get_default_farm_id())
-        self.assertEqual(process2.workflow.tasks.count(), 2)
-        task1, task2 = process2.workflow.tasks.order_by('slug')
+        self.assertEqual(process2.farm_id, get_default_farm_id())
+        self.assertEqual(process2.tasks.count(), 2)
+        task1, task2 = process2.tasks.order_by('slug')
         self.assertEqual(task1.slug, 'initialisation')
         self.assertEqual(task2.slug, f'reco_{str(self.recognizer.id)[:6]}')
         self.assertIn('--chunks-number 1', task1.command)
 
     def test_start_process_select_farm_id(self):
         """
-        A user can specify a ponos farm to use for a workflow
+        A user can specify a Ponos farm to use for a process
         """
+        # Run get_default_farm_id once to ensure the new farm is not the default one
         get_default_farm_id()
         barley_farm = Farm.objects.create(name='Barley farm')
         self.assertNotEqual(get_default_farm_id(), barley_farm.id)
         workers_process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
         workers_process.worker_runs.create(version=self.recognizer, parents=[], configuration=None)
         self.client.force_login(self.user)
-        with self.assertNumQueries(20):
+
+        with self.assertNumQueries(19):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(workers_process.id)}),
                 {'farm': str(barley_farm.id)}
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
         workers_process.refresh_from_db()
         self.assertEqual(workers_process.state, State.Unscheduled)
-        self.assertEqual(workers_process.workflow.farm_id, barley_farm.id)
+        self.assertEqual(workers_process.farm_id, barley_farm.id)
 
     def test_start_process_wrong_farm_id(self):
         """
@@ -1518,12 +1960,14 @@ class TestProcesses(FixtureAPITestCase):
         workers_process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
         self.client.force_login(self.user)
         wrong_farm_id = uuid.uuid4()
+
         with self.assertNumQueries(8):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(workers_process.id)}),
                 {'farm': str(wrong_farm_id)}
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {'farm': [f'Invalid pk "{wrong_farm_id}" - object does not exist.']})
 
     def test_start_process_params_validation(self):
@@ -1536,35 +1980,34 @@ class TestProcesses(FixtureAPITestCase):
             ({'thumbnails': 'gloubiboulga'}, {'thumbnails': ['Must be a valid boolean.']})
         ]
         for (params, check) in wrong_params_checks:
-            response = self.client.post(reverse('api:process-start', kwargs={'pk': str(process.id)}), params)
-            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
-            self.assertDictEqual(response.json(), check)
+            with self.subTest(**params), self.assertNumQueries(7):
+                response = self.client.post(reverse('api:process-start', kwargs={'pk': str(process.id)}), params)
+                self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+                self.assertDictEqual(response.json(), check)
 
     @override_settings(MAX_CHUNKS=42)
-    @override_settings(IMPORTS_WORKER_VERSION=None)
     def test_start_process_serializer_chunks_config(self):
         """
-        Assert the start process serializer use configuration max chunks
+        StartProcess should limit the number of chunks to `settings.MAX_CHUNKS`
         """
-        # Reloading serializer classes is required to update fields attributes from settings
-        from arkindex.process.serializers import imports
-        imports = importlib.reload(imports)
+        process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
+        process.worker_runs.create(version=self.recognizer, parents=[], configuration=None)
+        self.client.force_login(self.user)
 
-        params_checks = [
-            ({'chunks': 42}, True, None),
-            ({'chunks': 43}, False, {'chunks': ['Ensure this value is less than or equal to 42.']}),
-        ]
-        for (params, is_valid, message) in params_checks:
-            serializer = imports.StartProcessSerializer(data=params)
-            if not is_valid:
-                with self.assertRaisesRegex(ValidationError, 'Ensure this value is less than or equal to 42.'):
-                    self.assertEqual(serializer.is_valid(raise_exception=True))
-            else:
-                self.assertEqual(serializer.is_valid(), True)
+        with self.assertNumQueries(7):
+            response = self.client.post(
+                reverse('api:process-start', kwargs={'pk': str(process.id)}),
+                {'chunks': 43},
+            )
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
+        self.assertDictEqual(response.json(), {
+            'chunks': ['Ensure this value is less than or equal to 42.'],
+        })
 
-    def test_start_process_workflow_parameters(self):
+    def test_start_process_parameters(self):
         """
-        It should be possible to pass chunks and thumbnails parameters when starting a workflow
+        It should be possible to pass the chunks and thumbnails parameters when starting a process
         """
         process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
         # Add a worker run to this process
@@ -1575,10 +2018,10 @@ class TestProcesses(FixtureAPITestCase):
             reverse('api:process-start', kwargs={'pk': str(process.id)}),
             {'chunks': 3, 'thumbnails': 'true'}
         )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process.refresh_from_db()
 
-        self.assertEqual(list(process.workflow.tasks.order_by('slug').values_list('slug', flat=True)), [
+        self.assertEqual(list(process.tasks.order_by('slug').values_list('slug', flat=True)), [
             'initialisation',
             f'reco_{str(self.recognizer.id)[:6]}_1',
             f'reco_{str(self.recognizer.id)[:6]}_2',
@@ -1608,9 +2051,9 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(response.json(), {
             '__all__': ['The process must either use thumbnail generation or have worker runs.'],
-            'use_cache': 'The process must have workers attached to use cached results.',
-            'worker_activity': 'The process must have workers attached to handle their activity.',
-            'use_gpu': 'The process must have workers attached to use GPUs.'
+            'use_cache': ['The process must have workers attached to use cached results.'],
+            'worker_activity': ['The process must have workers attached to handle their activity.'],
+            'use_gpu': ['The process must have workers attached to use GPUs.'],
         })
         process.refresh_from_db()
         self.assertFalse(process.use_cache)
@@ -1639,12 +2082,14 @@ class TestProcesses(FixtureAPITestCase):
         worker_runs_mock.all.return_value = [run_mock]
 
         self.client.force_login(self.user)
-        with self.assertNumQueries(15):
+
+        with self.assertNumQueries(14):
             response = self.client.post(
                 reverse('api:process-start', kwargs={'pk': str(process.id)}),
                 {'use_cache': 'true', 'worker_activity': 'true', 'use_gpu': 'true'}
             )
-            self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+
         process.refresh_from_db()
         self.assertTrue(process.use_cache)
         self.assertTrue(process.use_gpu)
@@ -1657,7 +2102,7 @@ class TestProcesses(FixtureAPITestCase):
     @patch('arkindex.process.models.task_token_default')
     def test_pdf_import_build_workflow(self, token_mock):
         """
-        Build a workflow for a PDF import with a worker version defined in settings
+        Build tasks for a PDF import with a worker version defined in settings
         """
         process = self.corpus.processes.create(
             creator=self.user,
@@ -1667,7 +2112,7 @@ class TestProcesses(FixtureAPITestCase):
         token_mock.return_value = '12345'
 
         with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
-            workflow = process.build_workflow()
+            process.build_workflow()
 
         worker_run = process.worker_runs.get()
         self.assertEqual(worker_run.version, self.import_worker_version)
@@ -1675,7 +2120,7 @@ class TestProcesses(FixtureAPITestCase):
         self.assertIsNone(worker_run.configuration_id)
         self.assertIsNone(worker_run.model_version_id)
 
-        import_task = workflow.tasks.get(slug='import')
+        import_task = process.tasks.get(slug='import')
         self.assertEqual(import_task.env, {
             'ARKINDEX_API_TOKEN': 'testToken',
             'ARKINDEX_PROCESS_ID': str(process.id),
@@ -1692,7 +2137,7 @@ class TestProcesses(FixtureAPITestCase):
     @patch('arkindex.process.models.task_token_default')
     def test_worker_run_model_version_build_workflow(self, token_mock):
         """
-        Build a workflow for a PDF import with a worker version defined in settings
+        Build tasks for a process with a worker run that uses a model version
         """
         process = self.corpus.processes.create(
             creator=self.user,
@@ -1702,9 +2147,9 @@ class TestProcesses(FixtureAPITestCase):
         run = process.worker_runs.create(version=self.version_with_model, parents=[], configuration=None)
         run.model_version = self.model_version_1
         run.save()
-        workflow = process.build_workflow()
+        process.build_workflow()
 
-        initialization_task = workflow.tasks.get(slug='initialisation')
+        initialization_task = process.tasks.get(slug='initialisation')
         self.assertEqual(initialization_task.env, {
             'ARKINDEX_API_TOKEN': 'testToken',
             'ARKINDEX_PROCESS_ID': str(process.id),
@@ -1714,7 +2159,7 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(initialization_task.command, f'python -m arkindex_tasks.init_elements {process.id} --chunks-number 1')
         self.assertEqual(initialization_task.image, 'registry.teklia.com/tasks')
 
-        worker_task = workflow.tasks.get(slug=f'generic_{str(self.version_with_model.id)[:6]}')
+        worker_task = process.tasks.get(slug=f'generic_{str(self.version_with_model.id)[:6]}')
         self.assertEqual(worker_task.env, {
             'ARKINDEX_API_TOKEN': 'testToken',
             'ARKINDEX_PROCESS_ID': str(process.id),
@@ -1812,7 +2257,7 @@ class TestProcesses(FixtureAPITestCase):
         process.worker_runs.create(version=self.recognizer, parents=[], configuration=None)
         self.assertEqual(process.worker_runs.count(), 2)
         process.start()
-        process.workflow.tasks.update(state=State.Running)
+        process.tasks.update(state=State.Running)
         self.assertEqual(process.state, State.Running)
 
         self.client.force_login(self.user)
@@ -1821,9 +2266,9 @@ class TestProcesses(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(response.json(), {'__all__': ['A process can only be cleared before getting started.']})
 
-    def test_clear_process_unscheduled_workflow(self):
+    def test_clear_process_unscheduled(self):
         """
-        Cannot clear a process that has a workflow, even unscheduled
+        Cannot clear a process that has tasks, even unscheduled ones
         """
         process = self.corpus.processes.create(
             creator=self.user,
@@ -1839,7 +2284,7 @@ class TestProcesses(FixtureAPITestCase):
         self.client.force_login(self.user)
         with self.assertNumQueries(7):
             response = self.client.delete(reverse('api:clear-process', kwargs={'pk': str(process.id)}))
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(response.json(), {'__all__': ['A process can only be cleared before getting started.']})
 
     def test_clear_process_requires_permissions(self):
@@ -1926,13 +2371,13 @@ class TestProcesses(FixtureAPITestCase):
             State.Unscheduled,
         }
         self.elts_process.start()
-        self.assertEqual(self.elts_process.workflow.tasks.get().run, 0)
+        self.assertEqual(self.elts_process.tasks.get().run, 0)
         self.elts_process.activity_state = ActivityState.Ready
         self.elts_process.save()
         self.client.force_login(self.user)
         for state in unfinished_states:
             with self.subTest(state=state):
-                self.elts_process.workflow.tasks.update(state=state)
+                self.elts_process.tasks.update(state=state)
                 self.assertEqual(self.elts_process.state, state)
                 response = self.client.post(
                     reverse('api:process-select-failures', kwargs={'pk': str(self.elts_process.id)})
@@ -1945,8 +2390,8 @@ class TestProcesses(FixtureAPITestCase):
 
     def test_select_failed_elts_no_failure(self):
         self.elts_process.start()
-        self.assertEqual(self.elts_process.workflow.tasks.get().run, 0)
-        self.elts_process.workflow.tasks.update(state=State.Completed)
+        self.assertEqual(self.elts_process.tasks.get().run, 0)
+        self.elts_process.tasks.update(state=State.Completed)
         self.elts_process.activity_state = ActivityState.Ready
         self.elts_process.save()
         # Process has a single element that is ok
@@ -1959,7 +2404,7 @@ class TestProcesses(FixtureAPITestCase):
             state=WorkerActivityState.Processed,
         )
         self.client.force_login(self.user)
-        with self.assertNumQueries(11):
+        with self.assertNumQueries(7):
             response = self.client.post(
                 reverse('api:process-select-failures', kwargs={'pk': str(self.elts_process.id)})
             )
@@ -1971,8 +2416,8 @@ class TestProcesses(FixtureAPITestCase):
 
     def test_select_failed_elts(self):
         self.elts_process.start()
-        self.assertEqual(self.elts_process.workflow.tasks.get().run, 0)
-        self.elts_process.workflow.tasks.update(state=State.Completed)
+        self.assertEqual(self.elts_process.tasks.get().run, 0)
+        self.elts_process.tasks.update(state=State.Completed)
         self.elts_process.activity_state = ActivityState.Ready
         self.elts_process.save()
 
@@ -1995,7 +2440,7 @@ class TestProcesses(FixtureAPITestCase):
 
         self.assertCountEqual(list(self.user.selections.values_list("element__name", flat=True)), [])
         self.client.force_login(self.user)
-        with self.assertNumQueries(13):
+        with self.assertNumQueries(9):
             response = self.client.post(
                 reverse('api:process-select-failures', kwargs={'pk': str(self.elts_process.id)})
             )
diff --git a/arkindex/process/tests/test_repos.py b/arkindex/process/tests/test_repos.py
index c6abf21a47..c58238ac06 100644
--- a/arkindex/process/tests/test_repos.py
+++ b/arkindex/process/tests/test_repos.py
@@ -5,7 +5,7 @@ from rest_framework import status
 from rest_framework.exceptions import ValidationError
 from rest_framework.serializers import DateTimeField
 
-from arkindex.ponos.models import State, Workflow
+from arkindex.ponos.models import State, Task
 from arkindex.process.models import ActivityState, Process, ProcessMode, Repository
 from arkindex.project.tests import FixtureTestCase
 from arkindex.users.models import Role, User
@@ -72,7 +72,7 @@ class TestRepositories(FixtureTestCase):
         """
         self.repo.credentials = None
         self.repo.save()
-        workflows_count = Workflow.objects.count()
+        task_count = Task.objects.count()
 
         di = Process.objects.create(
             mode=ProcessMode.Repository,
@@ -86,7 +86,7 @@ class TestRepositories(FixtureTestCase):
         with self.assertRaises(ValidationError):
             di.retry()
 
-        self.assertEqual(Workflow.objects.count(), workflows_count)
+        self.assertEqual(Task.objects.count(), task_count)
 
     def test_list_repository_requires_login(self):
         with self.assertNumQueries(0):
@@ -172,7 +172,7 @@ class TestRepositories(FixtureTestCase):
         """
         process = self.repo_2.revisions.first().processes.create(mode=ProcessMode.Repository, creator=self.user)
         process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         self.repo_2.memberships.create(user=self.user, level=Role.Guest.value)
 
         with self.assertNumQueries(6):
@@ -197,7 +197,7 @@ class TestRepositories(FixtureTestCase):
         """
         process = self.repo.revisions.first().processes.create(mode=ProcessMode.Repository, creator=self.user)
         process.start()
-        task = process.workflow.tasks.get()
+        task = process.tasks.get()
         self.repo_2.memberships.create(user=self.user, level=Role.Guest.value)
 
         with self.assertNumQueries(6):
@@ -339,9 +339,9 @@ class TestRepositories(FixtureTestCase):
         git_provider_mock().get_or_create_revision.return_value = self.rev, None
         repo = Repository.objects.create(url='http://somewhere.com/repo')
         git_provider_mock().create_repo.return_value = repo
-        with self.assertNumQueries(10):
+        with self.assertNumQueries(11):
             response = self.client.post(reverse('api:available-repositories', kwargs={'pk': self.creds.id}), {'id': 1111})
-        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
+            self.assertEqual(response.status_code, status.HTTP_201_CREATED)
         process = Process.objects.get(revision=self.rev)
         self.assertEqual(response.json(), {
             'name': 'Import 1337 from http://my_repo.fake/workers/worker',
@@ -359,7 +359,6 @@ class TestRepositories(FixtureTestCase):
             'mode': ProcessMode.Repository.value,
             'revision': self.serialized_revision,
             'state': State.Unscheduled.value,
-            'workflow': None,
             'template_id': None,
             'model_id': None,
             'train_folder_id': None,
diff --git a/arkindex/process/tests/test_signals.py b/arkindex/process/tests/test_signals.py
index 59164485e3..1b1813b220 100644
--- a/arkindex/process/tests/test_signals.py
+++ b/arkindex/process/tests/test_signals.py
@@ -273,7 +273,7 @@ class TestSignals(FixtureAPITestCase):
         self.process_2.activity_state = ActivityState.Ready
         self.process_2.save()
         self.process_2.start()
-        task = self.process_2.workflow.tasks.first()
+        task = self.process_2.tasks.first()
 
         # Create one activity per WorkerActivityState on random elements
         element1, element2, element3, element4 = self.corpus.elements.all()[:4]
@@ -316,7 +316,7 @@ class TestSignals(FixtureAPITestCase):
             # Reset activity state between each case
             started_activity.state = WorkerActivityState.Started
             started_activity.save()
-            with self.subTest(state=new_task_state):
+            with self.subTest(old_state=old_task_state, new_state=new_task_state):
                 task.state = old_task_state
                 task.save()
 
diff --git a/arkindex/process/tests/test_templates.py b/arkindex/process/tests/test_templates.py
index 011ecfd566..daf73026bb 100644
--- a/arkindex/process/tests/test_templates.py
+++ b/arkindex/process/tests/test_templates.py
@@ -80,7 +80,7 @@ class TestTemplates(FixtureAPITestCase):
 
     def test_create_process_template(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(14):
+        with self.assertNumQueries(15):
             response = self.client.post(
                 reverse(
                     "api:create-process-template", kwargs={"pk": str(self.process_template.id)}
@@ -259,7 +259,7 @@ class TestTemplates(FixtureAPITestCase):
 
     def test_apply_process_template(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(19):
+        with self.assertNumQueries(20):
             response = self.client.post(
                 reverse('api:apply-process-template', kwargs={'pk': str(self.template.id)}),
                 data=json.dumps({"process_id": str(self.process.id)}),
@@ -293,7 +293,7 @@ class TestTemplates(FixtureAPITestCase):
             parents=[],
         )
         # Apply a template that has two other worker runs
-        with self.assertNumQueries(19):
+        with self.assertNumQueries(20):
             response = self.client.post(
                 reverse('api:apply-process-template', kwargs={'pk': str(self.template.id)}),
                 data=json.dumps({"process_id": str(process.id)}),
@@ -355,10 +355,10 @@ class TestTemplates(FixtureAPITestCase):
         with self.assertNumQueries(7):
             response = self.client.get(
                 reverse('api:process-list'),
-                data={"mode": 'template', "with_workflow": True},
+                data={"mode": 'template', "with_tasks": True},
                 content_type='application/json',
             )
         self.assertEqual(response.status_code, status.HTTP_200_OK)
-        # The 'with_workflow' filter should be ignored and some templates should be returned
+        # The 'with_tasks' filter should be ignored and some templates should be returned
         # If it wasn't ignored, no templates would be returned because none are configured
         self.assertTrue(len(response.json()) > 0)
diff --git a/arkindex/process/tests/test_transkribus_import.py b/arkindex/process/tests/test_transkribus_import.py
index fc4fae4acc..1c8d373825 100644
--- a/arkindex/process/tests/test_transkribus_import.py
+++ b/arkindex/process/tests/test_transkribus_import.py
@@ -140,22 +140,21 @@ class TestTranskribusImport(FixtureAPITestCase):
         self.assertIsNone(worker_run.configuration_id)
         self.assertIsNone(worker_run.model_version_id)
 
-        self.assertIsNotNone(process.workflow)
-        tasks_slugs = list(process.workflow.tasks.order_by('slug').values_list('slug', flat=True))
+        tasks_slugs = list(process.tasks.order_by('slug').values_list('slug', flat=True))
         self.assertEqual(len(tasks_slugs), 3)
         self.assertEqual(tasks_slugs, ['export_transkribus', 'import_arkindex', 'thumbnails'])
 
-        export_task = process.workflow.tasks.get(slug='export_transkribus')
+        export_task = process.tasks.get(slug='export_transkribus')
         self.assertEqual(export_task.command, 'python -m arkindex_tasks.export_transkribus 12345')
         self.assertEqual(export_task.image, 'registry.teklia.com/tasks')
         self.assertEqual(list(export_task.parents.all()), [])
 
-        import_task = process.workflow.tasks.get(slug='import_arkindex')
+        import_task = process.tasks.get(slug='import_arkindex')
         self.assertEqual(import_task.command, f'python -m arkindex_tasks.import_transkribus --job-path /data/export_transkribus/transkribus_export_job.json --corpus {corpus.id}')
         self.assertEqual(import_task.image, 'registry.teklia.com/tasks')
         self.assertEqual(list(import_task.parents.all()), [export_task])
 
-        thumbnails_task = process.workflow.tasks.get(slug='thumbnails')
+        thumbnails_task = process.tasks.get(slug='thumbnails')
         self.assertEqual(thumbnails_task.command, 'python3 -m arkindex_tasks.generate_thumbnails /data/import_arkindex/elements.json')
         self.assertEqual(thumbnails_task.image, 'registry.teklia.com/tasks')
         self.assertEqual(list(thumbnails_task.parents.all()), [import_task])
diff --git a/arkindex/process/tests/test_workeractivity.py b/arkindex/process/tests/test_workeractivity.py
index 585daf3037..23e2b6a91d 100644
--- a/arkindex/process/tests/test_workeractivity.py
+++ b/arkindex/process/tests/test_workeractivity.py
@@ -40,7 +40,7 @@ class TestWorkerActivity(FixtureTestCase):
         cls.process.worker_runs.create(version=cls.worker_version, parents=[], configuration=cls.configuration)
         cls.worker_type = WorkerType.objects.get(slug='recognizer')
         cls.process.start()
-        cls.task = cls.process.workflow.tasks.get(slug=cls.worker_version.slug)
+        cls.task = cls.process.tasks.get(slug=cls.worker_version.slug)
 
     def setUp(self):
         super().setUp()
@@ -226,7 +226,7 @@ class TestWorkerActivity(FixtureTestCase):
         process2 = Process.objects.create(mode=ProcessMode.Repository, creator=self.user, revision=rev)
         process2.save()
         process2.start()
-        task = process2.workflow.tasks.first()
+        task = process2.tasks.first()
 
         with self.assertNumQueries(2):
             response = self.client.put(
diff --git a/arkindex/process/tests/test_workerruns.py b/arkindex/process/tests/test_workerruns.py
index 9931783c60..e7af6fff1d 100644
--- a/arkindex/process/tests/test_workerruns.py
+++ b/arkindex/process/tests/test_workerruns.py
@@ -5,9 +5,8 @@ from django.test import override_settings
 from django.urls import reverse
 from rest_framework import status
 
-from arkindex.ponos.models import Artifact, State, Workflow
+from arkindex.ponos.models import Artifact, State
 from arkindex.process.models import GitRefType, ProcessMode, Revision, WorkerRun, WorkerVersion, WorkerVersionState
-from arkindex.process.utils import get_default_farm_id
 from arkindex.project.tests import FixtureAPITestCase
 from arkindex.training.models import Model, ModelVersion, ModelVersionState
 from arkindex.users.models import Role
@@ -63,9 +62,11 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_runs_list(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(9):
+
+        with self.assertNumQueries(10):
             response = self.client.get(reverse('api:worker-run-list', kwargs={'pk': str(self.process_1.id)}))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         data = response.json()
         self.assertEqual(data['results'], [{
             'id': str(self.run_1.id),
@@ -97,7 +98,7 @@ class TestWorkerRuns(FixtureAPITestCase):
             'parents': [],
             'model_version': None,
             'configuration': None,
-            'process':{
+            'process': {
                 'id': str(self.process_1.id),
                 'activity_state': 'disabled',
                 'corpus': str(self.corpus.id),
@@ -107,8 +108,7 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'state': 'unscheduled',
                 'test_folder_id': None,
                 'train_folder_id': None,
-                'validation_folder_id': None,
-                'workflow': None
+                'validation_folder_id': None
             },
         }])
 
@@ -199,8 +199,8 @@ class TestWorkerRuns(FixtureAPITestCase):
         process = self.corpus.processes.create(
             creator=self.user,
             mode=ProcessMode.Workers,
-            workflow=Workflow.objects.create(farm_id=get_default_farm_id()),
         )
+        process.start()
 
         self.client.force_login(self.user)
         response = self.client.post(
@@ -263,7 +263,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None,
             },
         })
         run = WorkerRun.objects.get(pk=pk)
@@ -357,7 +356,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
         })
         run = WorkerRun.objects.get(pk=pk)
@@ -453,7 +451,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None,
             },
         })
         run = WorkerRun.objects.get(pk=pk)
@@ -491,7 +488,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -548,7 +544,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -712,7 +707,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
         })
 
@@ -731,14 +725,15 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_update_run_requires_id_and_parents(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
                     'configuration_id': str(self.configuration_1.id),
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertDictEqual(response.json(), {'parents': ['This field is required.']})
 
     def test_update_run_requires_login(self):
@@ -772,14 +767,16 @@ class TestWorkerRuns(FixtureAPITestCase):
         """
         self.corpus.memberships.filter(user=self.user).update(level=Role.Guest.value)
         self.client.force_login(self.user)
-        with self.assertNumQueries(5):
+
+        with self.assertNumQueries(6):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
                     'parents': []
                 }
             )
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
         self.assertEqual(response.json(), {'detail': 'You do not have an admin access to the process project.'})
 
     def test_update_run_local(self):
@@ -790,7 +787,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         run = local_process.worker_runs.create(version=self.version_1, parents=[])
         self.client.force_login(self.user)
 
-        with self.assertNumQueries(3):
+        with self.assertNumQueries(4):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(run.id)}),
                 data={
@@ -829,14 +826,15 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_update_run_inexistant_parent(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
                     'parents': ['12341234-1234-1234-1234-123412341234'],
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), [
             f"Can't add or update WorkerRun {self.run_1.id} because parents field isn't properly defined. It can be either because"
             " one or several UUIDs don't refer to existing WorkerRuns or either because listed WorkerRuns doesn't belong to the"
@@ -874,7 +872,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -936,7 +933,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1005,7 +1001,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1040,13 +1035,15 @@ class TestWorkerRuns(FixtureAPITestCase):
     def test_update_run_invalid_configuration(self):
         self.client.force_login(self.user)
         self.assertEqual(self.run_1.configuration, None)
-        with self.assertNumQueries(7):
+
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': self.run_1.id}),
                 data={'parents': [], 'configuration_id': str(self.configuration_2.id)},
                 format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertDictEqual(response.json(), {'configuration_id': ['The configuration must be part of the same worker.']})
 
     def test_update_run_process_already_started(self):
@@ -1054,16 +1051,18 @@ class TestWorkerRuns(FixtureAPITestCase):
         Updating the dependencies of a worker run is not possible once the process has started
         """
         self.process_1.start()
-        self.assertIsNotNone(self.process_1.workflow)
+        self.assertTrue(self.process_1.tasks.exists())
         self.client.force_login(self.user)
-        with self.assertNumQueries(6):
+
+        with self.assertNumQueries(7):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
                     'parents': [],
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             '__all__': ['Cannot update a WorkerRun on a Process that has already started']
         })
@@ -1088,7 +1087,8 @@ class TestWorkerRuns(FixtureAPITestCase):
             version=version_no_model,
             parents=[],
         )
-        with self.assertNumQueries(7):
+
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
@@ -1096,7 +1096,8 @@ class TestWorkerRuns(FixtureAPITestCase):
                     'parents': []
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['This worker version does not support model usage.']
         })
@@ -1122,7 +1123,8 @@ class TestWorkerRuns(FixtureAPITestCase):
             parents=[],
         )
         random_model_version_uuid = str(uuid.uuid4())
-        with self.assertNumQueries(7):
+
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
@@ -1130,7 +1132,8 @@ class TestWorkerRuns(FixtureAPITestCase):
                     'parents': []
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': [f'Invalid pk "{random_model_version_uuid}" - object does not exist.']
         })
@@ -1160,7 +1163,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         model_no_access = Model.objects.create(name='Secret model')
         model_version_no_access = ModelVersion.objects.create(model=model_no_access, state=ModelVersionState.Available, size=8, hash='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', archive_hash='bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb')
 
-        with self.assertNumQueries(9):
+        with self.assertNumQueries(10):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
@@ -1168,7 +1171,8 @@ class TestWorkerRuns(FixtureAPITestCase):
                     'parents': []
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['You do not have access to this model version.']
         })
@@ -1193,7 +1197,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         self.model_version_1.state = ModelVersionState.Error
         self.model_version_1.save()
 
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.put(
                 reverse('api:worker-run-details', kwargs={'pk': str(run.id)}),
                 data={
@@ -1202,6 +1206,7 @@ class TestWorkerRuns(FixtureAPITestCase):
                 }, format='json'
             )
             self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['This model version is not in an available state.']
         })
@@ -1266,7 +1271,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(version_with_model.id),
@@ -1364,7 +1368,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(version_with_model.id),
@@ -1437,7 +1440,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1497,11 +1499,13 @@ class TestWorkerRuns(FixtureAPITestCase):
         """
         self.corpus.memberships.filter(user=self.user).update(level=Role.Guest.value)
         self.client.force_login(self.user)
-        with self.assertNumQueries(5):
+
+        with self.assertNumQueries(6):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)})
             )
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
         self.assertEqual(response.json(), {'detail': 'You do not have an admin access to the process project.'})
 
     def test_partial_update_run_invalid_id(self):
@@ -1538,7 +1542,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         run = local_process.worker_runs.create(version=self.version_1, parents=[])
         self.client.force_login(self.user)
 
-        with self.assertNumQueries(3):
+        with self.assertNumQueries(4):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(run.id)}),
                 data={
@@ -1551,7 +1555,7 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_partial_update_run_inexistant_parent(self):
         self.client.force_login(self.user)
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
@@ -1595,7 +1599,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1657,7 +1660,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1723,7 +1725,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -1757,13 +1758,13 @@ class TestWorkerRuns(FixtureAPITestCase):
     def test_partial_update_run_invalid_configuration(self):
         self.client.force_login(self.user)
         self.assertEqual(self.run_1.configuration, None)
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': self.run_1.id}),
                 data={'configuration_id': str(self.configuration_2.id)},
                 format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(response.json(), {'configuration_id': ['The configuration must be part of the same worker.']})
 
     def test_partial_update_run_process_already_started(self):
@@ -1771,9 +1772,9 @@ class TestWorkerRuns(FixtureAPITestCase):
         Updating the dependencies of a worker run is not possible once the process has started
         """
         self.process_1.start()
-        self.assertIsNotNone(self.process_1.workflow)
+        self.assertTrue(self.process_1.tasks.exists())
         self.client.force_login(self.user)
-        with self.assertNumQueries(6):
+        with self.assertNumQueries(7):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)}),
                 data={
@@ -1805,14 +1806,15 @@ class TestWorkerRuns(FixtureAPITestCase):
             version=version_no_model,
             parents=[],
         )
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
                     'model_version_id': str(self.model_version_1.id),
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['This worker version does not support model usage.']
         })
@@ -1838,14 +1840,16 @@ class TestWorkerRuns(FixtureAPITestCase):
             parents=[],
         )
         random_model_version_uuid = str(uuid.uuid4())
-        with self.assertNumQueries(7):
+
+        with self.assertNumQueries(8):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
                     'model_version_id': random_model_version_uuid,
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': [f'Invalid pk "{random_model_version_uuid}" - object does not exist.']
         })
@@ -1875,14 +1879,15 @@ class TestWorkerRuns(FixtureAPITestCase):
         model_no_access = Model.objects.create(name='Secret model')
         model_version_no_access = ModelVersion.objects.create(model=model_no_access, state=ModelVersionState.Available, size=8, hash='aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', archive_hash='bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb')
 
-        with self.assertNumQueries(9):
+        with self.assertNumQueries(10):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(run_2.id)}),
                 data={
                     'model_version_id': str(model_version_no_access.id),
                 }, format='json'
             )
-        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['You do not have access to this model version.']
         })
@@ -1907,7 +1912,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         self.model_version_1.state = ModelVersionState.Error
         self.model_version_1.save()
 
-        with self.assertNumQueries(7):
+        with self.assertNumQueries(8):
             response = self.client.patch(
                 reverse('api:worker-run-details', kwargs={'pk': str(run.id)}),
                 data={
@@ -1916,6 +1921,7 @@ class TestWorkerRuns(FixtureAPITestCase):
                 }, format='json'
             )
             self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+
         self.assertEqual(response.json(), {
             'model_version_id': ['This model version is not in an available state.']
         })
@@ -1950,6 +1956,7 @@ class TestWorkerRuns(FixtureAPITestCase):
                 }, format='json'
             )
             self.assertEqual(response.status_code, status.HTTP_200_OK)
+
         run.refresh_from_db()
         self.assertEqual(response.json(), {
             'id': str(run.id),
@@ -1977,7 +1984,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(version_with_model.id),
@@ -2071,7 +2077,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(version_with_model.id),
@@ -2143,7 +2148,6 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'test_folder_id': None,
                 'train_folder_id': None,
                 'validation_folder_id': None,
-                'workflow': None
             },
             'worker_version': {
                 'id': str(self.version_1.id),
@@ -2274,7 +2278,7 @@ class TestWorkerRuns(FixtureAPITestCase):
         """
         self.client.force_login(self.user)
         self.process_1.start()
-        self.process_1.workflow.tasks.update(state=State.Running)
+        self.process_1.tasks.update(state=State.Running)
         response = self.client.delete(
             reverse('api:worker-run-details', kwargs={'pk': str(self.run_1.id)})
         )
@@ -2283,8 +2287,7 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_build_task_no_parent(self):
         self.version_1.docker_image_id = self.artifact.id
-        self.process_1.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        task, parent_slugs = self.run_1.build_task(self.process_1.workflow, 'test', ENV.copy(), 'import', '/data/import/elements.json')
+        task, parent_slugs = self.run_1.build_task(self.process_1, 'test', ENV.copy(), 'import', '/data/import/elements.json')
 
         self.assertEqual(task.slug, 'test')
         self.assertEqual(task.image, f'my_repo.fake/workers/worker/reco:{str(self.version_1.id)}')
@@ -2302,8 +2305,7 @@ class TestWorkerRuns(FixtureAPITestCase):
 
     def test_build_task_with_chunk(self):
         self.version_1.docker_image_id = self.artifact.id
-        self.process_1.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        task, parent_slugs = self.run_1.build_task(self.process_1.workflow, 'test', ENV.copy(), 'import', '/data/import/elements.json', chunk=4)
+        task, parent_slugs = self.run_1.build_task(self.process_1, 'test', ENV.copy(), 'import', '/data/import/elements.json', chunk=4)
 
         self.assertEqual(task.slug, 'test_4')
         self.assertEqual(task.image, f'my_repo.fake/workers/worker/reco:{str(self.version_1.id)}')
@@ -2338,8 +2340,7 @@ class TestWorkerRuns(FixtureAPITestCase):
             parents=[self.run_1.id],
         )
 
-        self.process_1.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        task, parent_slugs = run_2.build_task(self.process_1.workflow, f'reco_{str(version_2.id)[0:6]}', ENV.copy(), 'import', '/data/import/elements.json')
+        task, parent_slugs = run_2.build_task(self.process_1, f'reco_{str(version_2.id)[0:6]}', ENV.copy(), 'import', '/data/import/elements.json')
 
         self.assertEqual(task.slug, f'reco_{str(version_2.id)[0:6]}')
         self.assertEqual(task.image, f'my_repo.fake/workers/worker/reco:{str(version_2.id)}')
@@ -2372,8 +2373,7 @@ class TestWorkerRuns(FixtureAPITestCase):
             parents=[self.run_1.id],
         )
 
-        self.process_1.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        task, parent_slugs = run_2.build_task(self.process_1.workflow, f'reco_{str(version_2.id)[0:6]}', ENV.copy(), 'import', '/data/import/elements.json', chunk=4)
+        task, parent_slugs = run_2.build_task(self.process_1, f'reco_{str(version_2.id)[0:6]}', ENV.copy(), 'import', '/data/import/elements.json', chunk=4)
 
         self.assertEqual(task.slug, f'reco_{str(version_2.id)[0:6]}_4')
         self.assertEqual(task.image, f'my_repo.fake/workers/worker/reco:{str(version_2.id)}')
@@ -2397,8 +2397,7 @@ class TestWorkerRuns(FixtureAPITestCase):
                 'shm_size': 505,
             }
         }
-        self.process_1.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        task, parent_slugs = self.run_1.build_task(self.process_1.workflow, 'test', ENV.copy(), 'import', '/data/import/elements.json')
+        task, parent_slugs = self.run_1.build_task(self.process_1, 'test', ENV.copy(), 'import', '/data/import/elements.json')
 
         self.assertEqual(task.slug, 'test')
         self.assertEqual(task.image, f'my_repo.fake/workers/worker/reco:{str(self.version_1.id)}')
@@ -2436,7 +2435,7 @@ class TestWorkerRuns(FixtureAPITestCase):
             AssertionError,
             f"Worker Version {version_2.id} is not available and cannot be used to build a task."
         ):
-            run_2.build_task(self.process_1.workflow, 'test', ENV.copy(), 'import', '/data/import/elements.json')
+            run_2.build_task(self.process_1, 'test', ENV.copy(), 'import', '/data/import/elements.json')
 
     def test_build_task_unavailable_model_version(self):
         self.model_version_1.state = ModelVersionState.Created
@@ -2447,4 +2446,4 @@ class TestWorkerRuns(FixtureAPITestCase):
             AssertionError,
             f"ModelVersion {self.model_version_1.id} is not available and cannot be used to build a task."
         ):
-            self.run_1.build_task(self.process_1.workflow, 'test', ENV.copy(), 'import', '/data/import/elements.json')
+            self.run_1.build_task(self.process_1, 'test', ENV.copy(), 'import', '/data/import/elements.json')
diff --git a/arkindex/process/tests/test_workers.py b/arkindex/process/tests/test_workers.py
index 41d3ffba39..4dbaf04950 100644
--- a/arkindex/process/tests/test_workers.py
+++ b/arkindex/process/tests/test_workers.py
@@ -3,7 +3,6 @@ import uuid
 from django.urls import reverse
 from rest_framework import status
 
-from arkindex.ponos.models import Workflow
 from arkindex.process.models import (
     GitRefType,
     ProcessMode,
@@ -15,7 +14,6 @@ from arkindex.process.models import (
     WorkerVersionGPUUsage,
     WorkerVersionState,
 )
-from arkindex.process.utils import get_default_farm_id
 from arkindex.project.tests import FixtureAPITestCase
 from arkindex.users.models import Right, Role, User
 
@@ -51,7 +49,7 @@ class TestWorkersWorkerVersions(FixtureAPITestCase):
 
         process = cls.rev.processes.create(mode=ProcessMode.Repository, creator=cls.user)
         process.start()
-        cls.task = process.workflow.tasks.get()
+        cls.task = process.tasks.get()
 
     def setUp(self):
         super().setUp()
@@ -1386,11 +1384,9 @@ class TestWorkersWorkerVersions(FixtureAPITestCase):
         self.version_1.save()
 
         process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
-        process.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        process.save()
         process.start()
 
-        task = process.workflow.tasks.first()
+        task = process.tasks.first()
         artifact = task.artifacts.create(
             path='path/to/file.json',
             size=100,
@@ -1437,10 +1433,8 @@ class TestWorkersWorkerVersions(FixtureAPITestCase):
 
     def test_update_version_available_docker_image_not_null(self):
         process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
-        process.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        process.save()
         process.start()
-        task = process.workflow.tasks.first()
+        task = process.tasks.first()
         self.version_1.docker_image = task.artifacts.create(
             path='path/to/file.json',
             size=100,
@@ -1464,10 +1458,8 @@ class TestWorkersWorkerVersions(FixtureAPITestCase):
 
     def test_update_version_available_docker_image_iid_not_null(self):
         process = self.corpus.processes.create(creator=self.user, mode=ProcessMode.Workers)
-        process.workflow = Workflow.objects.create(farm_id=get_default_farm_id())
-        process.save()
         process.start()
-        task = process.workflow.tasks.first()
+        task = process.tasks.first()
         self.version_1.docker_image = task.artifacts.create(
             path='path/to/file.json',
             size=100,
diff --git a/arkindex/project/api_v1.py b/arkindex/project/api_v1.py
index da40800eea..a2d9e341b3 100644
--- a/arkindex/project/api_v1.py
+++ b/arkindex/project/api_v1.py
@@ -74,7 +74,6 @@ from arkindex.ponos.api import (
     TaskDefinition,
     TaskDetailsFromAgent,
     TaskUpdate,
-    WorkflowDetails,
 )
 from arkindex.process.api import (
     ApplyProcessTemplate,
@@ -92,7 +91,7 @@ from arkindex.process.api import (
     GitRepositoryImportHook,
     ImportTranskribus,
     ListProcessElements,
-    ProcessEdit,
+    ProcessDetails,
     ProcessList,
     ProcessRetry,
     ProcessWorkersActivity,
@@ -272,7 +271,7 @@ api = [
     path('process/', ProcessList.as_view(), name='process-list'),
     path('process/fromfiles/', FilesProcess.as_view(), name='files-process'),
     path('process/transkribus/', ImportTranskribus.as_view(), name='import-transkribus'),
-    path('process/<uuid:pk>/', ProcessEdit.as_view(), name='process-details'),
+    path('process/<uuid:pk>/', ProcessDetails.as_view(), name='process-details'),
     path('process/<uuid:pk>/retry/', ProcessRetry.as_view(), name='process-retry'),
     path('process/<uuid:pk>/start/', StartProcess.as_view(), name='process-start'),
     path('process/files/<uuid:pk>/', DataFileList.as_view(), name='file-list'),
@@ -343,7 +342,6 @@ api = [
     path('openapi/', OpenApiSchemaView.as_view(), name='openapi-schema'),
 
     # Ponos
-    path("workflow/<uuid:pk>/", WorkflowDetails.as_view(), name="workflow-details"),
     path("task/", TaskCreate.as_view(), name="task-create"),
     path("task/<uuid:pk>/", TaskUpdate.as_view(), name="task-update"),
     path(
diff --git a/arkindex/project/tests/test_ponos_view.py b/arkindex/project/tests/test_ponos_view.py
index fc5895efd9..96cf9f09b2 100644
--- a/arkindex/project/tests/test_ponos_view.py
+++ b/arkindex/project/tests/test_ponos_view.py
@@ -4,8 +4,8 @@ from rest_framework import status
 
 from arkindex.documents.models import Corpus
 from arkindex.ponos.authentication import AgentUser
-from arkindex.ponos.models import Agent, Artifact, Farm, State
-from arkindex.process.models import Process, ProcessMode
+from arkindex.ponos.models import Agent, Artifact, Farm
+from arkindex.process.models import ProcessMode
 from arkindex.project.tests import FixtureAPITestCase
 from arkindex.users.models import Role, User
 
@@ -16,21 +16,20 @@ class TestPonosView(FixtureAPITestCase):
     @classmethod
     def setUpTestData(cls):
         super().setUpTestData()
+        cls.creator = User.objects.create(email="creator@user.me")
+        cls.artifact = Artifact.objects.get(path='/path/to/docker_build')
+        cls.task = cls.artifact.task
+
+        # Assign a corpus to the task's process so we can test the process corpus permissions
         cls.process_corpus = Corpus.objects.create(name='Another public corpus', public=True)
         cls.corpus_admin = User.objects.create(email='corpusadmin@test.me')
         cls.corpus_admin.rights.create(content_object=cls.process_corpus, level=Role.Admin.value)
 
-        cls.creator = User.objects.create(email="creator@user.me")
-        cls.artifact = Artifact.objects.get(path='/path/to/docker_build')
-        cls.task = cls.artifact.task
-        cls.workflow = cls.task.workflow
-        cls.task = cls.workflow.tasks.get()
-        cls.process = Process.objects.create(
-            mode=ProcessMode.Files,
-            creator=cls.creator,
-            workflow=cls.workflow,
-            corpus=cls.process_corpus,
-        )
+        process = cls.task.process
+        process.mode = ProcessMode.Files
+        process.corpus = cls.process_corpus
+        process.save()
+
         cls.agent = Agent.objects.create(
             cpu_cores=3,
             cpu_frequency=3e9,
@@ -101,10 +100,10 @@ class TestPonosView(FixtureAPITestCase):
         """
         test_cases = (
             (None, status.HTTP_403_FORBIDDEN, 0),
-            (self.creator, status.HTTP_403_FORBIDDEN, 9),
-            (self.user, status.HTTP_403_FORBIDDEN, 9),
+            (self.creator, status.HTTP_403_FORBIDDEN, 8),
+            (self.user, status.HTTP_403_FORBIDDEN, 8),
             (self.superuser, status.HTTP_200_OK, 10),
-            (self.corpus_admin, status.HTTP_200_OK, 15),
+            (self.corpus_admin, status.HTTP_200_OK, 14),
         )
         for user, status_code, requests_count in test_cases:
             with self.subTest(user=user):
@@ -124,10 +123,10 @@ class TestPonosView(FixtureAPITestCase):
         """
         test_cases = (
             (None, status.HTTP_403_FORBIDDEN, 0),
-            (self.creator, status.HTTP_403_FORBIDDEN, 10),
-            (self.user, status.HTTP_403_FORBIDDEN, 10),
+            (self.creator, status.HTTP_403_FORBIDDEN, 9),
+            (self.user, status.HTTP_403_FORBIDDEN, 9),
             (self.superuser, status.HTTP_302_FOUND, 4),
-            (self.corpus_admin, status.HTTP_302_FOUND, 10),
+            (self.corpus_admin, status.HTTP_302_FOUND, 9),
         )
         for user, status_code, requests_count in test_cases:
             with self.subTest(user=user):
@@ -160,41 +159,3 @@ class TestPonosView(FixtureAPITestCase):
                 HTTP_AUTHORIZATION="Bearer {}".format(agent_user.token.access_token),
             )
             self.assertEqual(response.status_code, status.HTTP_302_FOUND)
-
-    def test_retrieve_workflow(self):
-        """
-        A user with a guest right on a process cannot retrieve its workflow
-        """
-        self.client.force_login(self.user)
-        self.process_corpus.memberships.create(user=self.user, level=Role.Guest.value)
-        with self.assertNumQueries(10):
-            response = self.client.get(reverse('api:workflow-details', kwargs={'pk': str(self.workflow.id)}))
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
-
-    def test_retry_workflow_requires_admin_access(self):
-        """
-        A user cannot retry a workflow if they have no admin access to their process
-        """
-        self.task.state = State.Running
-        self.task.save()
-        self.client.force_login(self.user)
-        self.process_corpus.memberships.create(user=self.user, level=Role.Guest.value)
-        with self.assertNumQueries(9):
-            response = self.client.patch(
-                reverse('api:workflow-details', kwargs={'pk': str(self.workflow.id)}),
-                {'status': 'stopped'}
-            )
-        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
-        self.assertEqual(response.json(), {'detail': 'You do not have an admin access to this process.'})
-
-    def test_retry_workflow(self):
-        self.task.state = State.Running
-        self.task.save()
-        self.client.force_login(self.user)
-        self.process_corpus.memberships.create(user=self.user, level=Role.Admin.value)
-        with self.assertNumQueries(15):
-            response = self.client.patch(
-                reverse('api:workflow-details', kwargs={'pk': str(self.workflow.id)}),
-                {'status': 'stopped'}
-            )
-        self.assertEqual(response.status_code, status.HTTP_200_OK)
diff --git a/arkindex/project/validators.py b/arkindex/project/validators.py
index f0559cf6a3..9b50c85e07 100644
--- a/arkindex/project/validators.py
+++ b/arkindex/project/validators.py
@@ -1,3 +1,4 @@
+from django.core import validators
 from rest_framework import serializers
 
 
@@ -62,3 +63,30 @@ class ForbiddenValidator(object):
     def __call__(self, data):
         if data is not serializers.empty:
             raise serializers.ValidationError(self.message)
+
+
+class HiddenCallableValidatorMixin(object):
+    """
+    Implements a workaround for issues with error messages in DRF and with
+    drf-spectacular OpenAPI schema generation when the `limit_value` of any
+    validator extending django.core.validators.BaseValidator is a callable.
+    This rewrites `self.limit_value` as a property, which calls the original
+    limit value when it is callable, while making Django, DRF and
+    Spectacular believe it isn't callable.
+
+    https://github.com/encode/django-rest-framework/discussions/8833
+    https://github.com/tfranzel/drf-spectacular/issues/913
+    """
+
+    def __init__(self, limit_value, message=None):
+        self._limit_value = limit_value
+        if message:
+            self.message = message
+
+    @property
+    def limit_value(self):
+        return self._limit_value() if callable(self._limit_value) else self._limit_value
+
+
+class MaxValueValidator(HiddenCallableValidatorMixin, validators.MaxValueValidator):
+    pass
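+
+
+# A minimal usage sketch (hypothetical serializer field, not part of this
+# patch): `limit_value` may now be a callable, evaluated lazily by the
+# property above, so DRF error messages and drf-spectacular schema
+# generation both see a plain value instead of a function.
+#
+#   def get_chunk_limit():
+#       # e.g. read a configurable limit at validation time
+#       return 10
+#
+#   chunks = serializers.IntegerField(
+#       validators=[MaxValueValidator(get_chunk_limit)],
+#   )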
diff --git a/arkindex/sql_validation/corpus_delete.sql b/arkindex/sql_validation/corpus_delete.sql
index 88f81609f7..846e7e58ac 100644
--- a/arkindex/sql_validation/corpus_delete.sql
+++ b/arkindex/sql_validation/corpus_delete.sql
@@ -170,7 +170,7 @@ WHERE "documents_elementtype"."corpus_id" = '{corpus_id}'::uuid;
 DELETE
 FROM "users_right"
 WHERE ("users_right"."content_id" = '{corpus_id}'::uuid
-       AND "users_right"."content_type_id" = 21);
+       AND "users_right"."content_type_id" = 20);
 
 DELETE
 FROM "documents_corpusexport"
@@ -184,6 +184,14 @@ WHERE "process_workerrun"."id" IN
          INNER JOIN "process_process" U1 ON (U0."process_id" = U1."id")
          WHERE U1."corpus_id" = '{corpus_id}'::uuid);
 
+DELETE
+FROM "ponos_task"
+WHERE "ponos_task"."id" IN
+        (SELECT U0."id"
+         FROM "ponos_task" U0
+         INNER JOIN "process_process" U1 ON (U0."process_id" = U1."id")
+         WHERE U1."corpus_id" = '{corpus_id}'::uuid);
+
 DELETE
 FROM "process_process"
 WHERE "process_process"."corpus_id" = '{corpus_id}'::uuid;
diff --git a/arkindex/sql_validation/corpus_delete_top_level_type.sql b/arkindex/sql_validation/corpus_delete_top_level_type.sql
index 0e7ae064a9..e1846980bf 100644
--- a/arkindex/sql_validation/corpus_delete_top_level_type.sql
+++ b/arkindex/sql_validation/corpus_delete_top_level_type.sql
@@ -174,7 +174,7 @@ WHERE "documents_elementtype"."corpus_id" = '{corpus_id}'::uuid;
 DELETE
 FROM "users_right"
 WHERE ("users_right"."content_id" = '{corpus_id}'::uuid
-       AND "users_right"."content_type_id" = 21);
+       AND "users_right"."content_type_id" = 20);
 
 DELETE
 FROM "documents_corpusexport"
@@ -188,6 +188,14 @@ WHERE "process_workerrun"."id" IN
          INNER JOIN "process_process" U1 ON (U0."process_id" = U1."id")
          WHERE U1."corpus_id" = '{corpus_id}'::uuid);
 
+DELETE
+FROM "ponos_task"
+WHERE "ponos_task"."id" IN
+        (SELECT U0."id"
+         FROM "ponos_task" U0
+         INNER JOIN "process_process" U1 ON (U0."process_id" = U1."id")
+         WHERE U1."corpus_id" = '{corpus_id}'::uuid);
+
 DELETE
 FROM "process_process"
 WHERE "process_process"."corpus_id" = '{corpus_id}'::uuid;
diff --git a/arkindex/sql_validation/process_elements_filter_type.sql b/arkindex/sql_validation/process_elements_filter_type.sql
index 70dcde44ae..9d48f34492 100644
--- a/arkindex/sql_validation/process_elements_filter_type.sql
+++ b/arkindex/sql_validation/process_elements_filter_type.sql
@@ -21,8 +21,10 @@ SELECT "process_process"."id",
        "process_process"."corpus_id",
        "process_process"."mode",
        "process_process"."revision_id",
-       "process_process"."workflow_id",
        "process_process"."activity_state",
+       "process_process"."started",
+       "process_process"."finished",
+       "process_process"."farm_id",
        "process_process"."element_id",
        "process_process"."folder_type_id",
        "process_process"."element_type_id",
diff --git a/arkindex/sql_validation/process_elements_top_level.sql b/arkindex/sql_validation/process_elements_top_level.sql
index 77379ae7e5..d60707b023 100644
--- a/arkindex/sql_validation/process_elements_top_level.sql
+++ b/arkindex/sql_validation/process_elements_top_level.sql
@@ -21,8 +21,10 @@ SELECT "process_process"."id",
        "process_process"."corpus_id",
        "process_process"."mode",
        "process_process"."revision_id",
-       "process_process"."workflow_id",
        "process_process"."activity_state",
+       "process_process"."started",
+       "process_process"."finished",
+       "process_process"."farm_id",
        "process_process"."element_id",
        "process_process"."folder_type_id",
        "process_process"."element_type_id",
diff --git a/arkindex/sql_validation/process_elements_with_image.sql b/arkindex/sql_validation/process_elements_with_image.sql
index 7822816f4d..d68fb153ba 100644
--- a/arkindex/sql_validation/process_elements_with_image.sql
+++ b/arkindex/sql_validation/process_elements_with_image.sql
@@ -21,8 +21,10 @@ SELECT "process_process"."id",
        "process_process"."corpus_id",
        "process_process"."mode",
        "process_process"."revision_id",
-       "process_process"."workflow_id",
        "process_process"."activity_state",
+       "process_process"."started",
+       "process_process"."finished",
+       "process_process"."farm_id",
        "process_process"."element_id",
        "process_process"."folder_type_id",
        "process_process"."element_type_id",
diff --git a/arkindex/training/tests/test_datasets_api.py b/arkindex/training/tests/test_datasets_api.py
index 4b5f5cf3ec..3006bc573c 100644
--- a/arkindex/training/tests/test_datasets_api.py
+++ b/arkindex/training/tests/test_datasets_api.py
@@ -22,8 +22,7 @@ class TestDatasetsAPI(FixtureAPITestCase):
         super().setUpTestData()
         cls.process = Process.objects.get(mode=ProcessMode.Workers)
         cls.process.start()
-        cls.workflow = cls.process.workflow
-        cls.task = cls.workflow.tasks.first()
+        cls.task = cls.process.tasks.first()
         cls.dataset_creator = User.objects.create(email='adam@nerv.co.jp', display_name='Adam', verified_email=True)
         cls.private_corpus = Corpus.objects.create(name='private', public=False)
         cls.read_user = User.objects.get(email='user3@user.fr')
diff --git a/arkindex/users/models.py b/arkindex/users/models.py
index 364041b2e9..aab8e73c14 100644
--- a/arkindex/users/models.py
+++ b/arkindex/users/models.py
@@ -126,7 +126,7 @@ class User(AbstractBaseUser):
 
     def has_scope(self, scope):
         assert isinstance(scope, Scope), 'Scope should be from the Scope enum'
-        # TODO: Could be optimized in a way similar to arkindex.ponos.models.Workflow.get_state()
+        # TODO: Could be optimized in a way similar to Process.get_state()
         # See https://gitlab.com/teklia/arkindex/ponos/-/blob/e5989f3e1dd1f6d7d93ba940b7ed2c5471ddd34f/ponos/models.py#L370
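+        # One possible shape for that optimization (a sketch only, not
+        # implemented here): cache the scopes once per User instance with
+        # django.utils.functional.cached_property, so repeated has_scope()
+        # calls reuse a single query instead of each filtering the table:
+        #
+        #   @cached_property
+        #   def scopes(self):
+        #       return set(self.user_scopes.values_list('scope', flat=True))
+        #
+        # and then check membership in self.scopes instead of filtering.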
         return self.user_scopes.filter(scope=scope).exists()
 
-- 
GitLab