diff --git a/arkindex/documents/date_parser.py b/arkindex/documents/date_parser.py
index 20860e2ba63fa23dc9e50867d829962e8bf9409d..a12121c56bd60a1ca4c7be66e9ad51987a22cfdc 100644
--- a/arkindex/documents/date_parser.py
+++ b/arkindex/documents/date_parser.py
@@ -143,7 +143,7 @@ def instanciate_date(date_elt):
     try:
         date.validate()
     except ValueError as e:
-        logger.warning("Date fields are incorrect: {}".format(e))
+        logger.warning(f"Date fields are incorrect: {e}")
         raise
     return date
 
@@ -161,6 +161,6 @@ def parse_date(raw_date, functions_table=DATE_FUNCTIONS_TABLE):
             if date_elts:
                 return tuple(map(instanciate_date, date_elts))
         except Exception:
-            logger.warning("Failed parsing {} with function {}".format(raw_date, f.__name__))
-    logger.warning("Date not supported: {}".format(raw_date))
+            logger.warning(f"Failed parsing {raw_date} with function {f.__name__}")
+    logger.warning(f"Date not supported: {raw_date}")
     return ()
diff --git a/arkindex/documents/dates.py b/arkindex/documents/dates.py
index 6e0cdd57f797dcddd7ea6a12b8caf35e20890632..0c759b7a360cacaa1956fdee67582f84f5408f08 100644
--- a/arkindex/documents/dates.py
+++ b/arkindex/documents/dates.py
@@ -26,17 +26,17 @@ class InterpretedDate:
 
     def validate(self):
         if self.year < 0:
-            raise ValueError("Year {} is negative".format(self.year))
+            raise ValueError(f"Year {self.year} is negative")
         if self.month and (self.month < 1 or self.month > 12):
-            raise ValueError("Month {} is not between 1 and 12".format(self.month))
+            raise ValueError(f"Month {self.month} is not between 1 and 12")
         if self.day and (self.day < 1 or self.day > 31):
-            raise ValueError("Day {} is not between 1 and 31".format(self.day))
+            raise ValueError(f"Day {self.day} is not between 1 and 31")
         # Check if day is correct depending on year and month
         if self.precision == DatePrecision.Day:
             try:
                 datetime(*tuple(self))
             except ValueError:
-                raise ValueError("Date format is incorrect {}".format(self))
+                raise ValueError(f"Date format is incorrect {self}")
 
     @property
     def precision(self):
@@ -70,7 +70,7 @@ class InterpretedDate:
             return s > o
 
     def __str__(self):
-        return "-".join("{:02d}".format(e) for e in tuple(self) if e)
+        return "-".join(f"{e:02d}" for e in tuple(self) if e)
 
 
 class InterpretedDateMixin:
diff --git a/arkindex/documents/management/commands/move_lines_to_parent.py b/arkindex/documents/management/commands/move_lines_to_parent.py
index 42229e194b96967cd25e59d956cc80c7ec1ccfe9..dd393f6113eddb474b595e1f9ff8861177413f9b 100644
--- a/arkindex/documents/management/commands/move_lines_to_parent.py
+++ b/arkindex/documents/management/commands/move_lines_to_parent.py
@@ -6,6 +6,7 @@ from django.core.management.base import BaseCommand, CommandError
 from arkindex.documents.models import ElementType
 from arkindex.project.argparse import CorpusArgument
 
+
 # x and y of top left and bottom right points
 class BBox(NamedTuple):
     left: int
diff --git a/arkindex/documents/managers.py b/arkindex/documents/managers.py
index d09737577c645dfa2e5be78165f3b1cd76e7df0e..9ccb6494b871c79f1e221b5f1569de4b0ce62015 100644
--- a/arkindex/documents/managers.py
+++ b/arkindex/documents/managers.py
@@ -47,13 +47,13 @@ class ElementQuerySet(models.QuerySet):
             # by including the target SQL query, and joining it directly with paths
             # It's not possible to do that join with Django ORM
             with connections["default"].cursor() as cursor:
-                cursor.execute("""select min(length), max(length) FROM (
+                cursor.execute(f"""select min(length), max(length) FROM (
                         select array_length(p.path, 1) as length
                         from documents_elementpath as p
                         inner join
-                        ({}) as input on (array[input.id] && p.path)
+                        ({sql}) as input on (array[input.id] && p.path)
                 ) as lengths
-                """.format(sql), params)
+                """, params)
                 min_paths, max_paths = cursor.fetchone()
 
             # Postgres will give us None when no children is found
@@ -86,16 +86,16 @@ class ElementQuerySet(models.QuerySet):
         # directly into an SQL DELETE statement for paths
         # Once paths are deleted, we can finally delete the targeted elements
         with connections["default"].cursor() as cursor:
-            cursor.execute("""
+            cursor.execute(f"""
             WITH element_ids (id) AS (
                 DELETE FROM documents_elementpath
-                WHERE element_id IN ({})
+                WHERE element_id IN ({sql})
                 RETURNING element_id
             )
             DELETE FROM documents_element element
             USING element_ids
             WHERE element.id = element_ids.id
-            """.format(sql), params)
+            """, params)
 
         # Finally, delete top elements.
         # Ensure the QuerySet does not use a DISTINCT; it is useless for a deletion, and since Django 3.2,
diff --git a/arkindex/documents/models.py b/arkindex/documents/models.py
index 046b66237cdce8f70938c3b9fac2968ff4a57651..2a906714ad368e6696af1b88cc4b3f58d8b761ed 100644
--- a/arkindex/documents/models.py
+++ b/arkindex/documents/models.py
@@ -677,7 +677,7 @@ class Element(IndexableModel):
         )
 
     def __str__(self):
-        return "{}: {}".format(self.type.display_name, self.name)
+        return f"{self.type.display_name}: {self.name}"
 
 
 class EntityType(models.Model):
@@ -824,7 +824,7 @@ class Transcription(models.Model):
         ]
 
     def __str__(self):
-        return "Transcription: {}".format(self.text[:20])
+        return f"Transcription: {self.text[:20]}"
 
 
 class TranscriptionEntity(models.Model):
@@ -1109,9 +1109,7 @@ class MetaData(InterpretedDateMixin, models.Model):
         if self.entity is None or self.element is None:
             return
         if self.entity.corpus != self.element.corpus:
-            raise ValidationError("Entity's corpus {} is different from the expected corpus {}".format(
-                self.entity.corpus,
-                self.element.corpus))
+            raise ValidationError(f"Entity's corpus {self.entity.corpus} is different from the expected corpus {self.element.corpus}")
 
     def save(self, *args, **kwargs):
         self.clean()
diff --git a/arkindex/documents/serializers/elements.py b/arkindex/documents/serializers/elements.py
index d8ba836bed4af52fe35ef1d7c072fe6d0f890914..b7d70eb7852d42b9023af21f01e714b19b54d1be 100644
--- a/arkindex/documents/serializers/elements.py
+++ b/arkindex/documents/serializers/elements.py
@@ -500,7 +500,7 @@ class ElementParentSerializer(serializers.Serializer):
         parent = data.get("parent")
 
         if parent.corpus_id != child.corpus_id:
-            errors["parent"].append("Parent is not from corpus '{}'".format(child.corpus.name))
+            errors["parent"].append(f"Parent is not from corpus '{child.corpus.name}'")
         if parent.id == child.id:
             errors["parent"].append("A child cannot be its own parent")
         if errors:
@@ -511,12 +511,12 @@ class ElementParentSerializer(serializers.Serializer):
         # Assert parent is not an child's ancestor already
         if ElementPath.objects.filter(element_id=child.id, path__contains=[parent.id]).exists():
             raise ValidationError({"parent": [
-                "'{}' is already a parent of '{}'".format(parent.id, child.id)
+                f"'{parent.id}' is already a parent of '{child.id}'"
             ]})
         # Assert parent is not an alement's descendant
         if ElementPath.objects.filter(element_id=parent.id, path__contains=[child.id]).exists():
             raise ValidationError({"parent": [
-                "'{}' is a child of element '{}'".format(parent.id, child.id)
+                f"'{parent.id}' is a child of element '{child.id}'"
             ]})
 
     def create(self, validated_data):
@@ -1006,12 +1006,12 @@ class ElementDestinationSerializer(serializers.Serializer):
         # Assert destination is not a source's direct ancestor already
         if ElementPath.objects.filter(element_id=source.id, path__last=destination.id).exists():
             raise ValidationError({"destination": [
-                "'{}' is already a direct parent of '{}'".format(destination.id, source.id)
+                f"'{destination.id}' is already a direct parent of '{source.id}'"
             ]})
         # Assert destination is not a source's descendant
         if ElementPath.objects.filter(element_id=destination.id, path__contains=[source.id]).exists():
             raise ValidationError({"destination": [
-                "'{}' is a child of element '{}'".format(destination.id, source.id)
+                f"'{destination.id}' is a child of element '{source.id}'"
             ]})
 
 
@@ -1048,12 +1048,12 @@ class SelectionMoveSerializer(serializers.Serializer, SelectionMixin):
         # Assert destination is not a source's direct ancestor already
         if ElementPath.objects.filter(element__corpus_id=corpus.id, element__selections__user_id=self.context["request"].user.id, path__last=destination.id).exists():
             raise ValidationError({"destination": [
-                "'{}' is already a direct parent of one or more selected elements.".format(destination.id)
+                f"'{destination.id}' is already a direct parent of one or more selected elements."
             ]})
         # Assert destination is not a source's descendant
         if destination.paths.filter(path__overlap=Array(selected_elements.values_list("id", flat=True))).exists():
             raise ValidationError({"destination": [
-                "'{}' is a child of one or more selected elements.".format(destination.id)
+                f"'{destination.id}' is a child of one or more selected elements."
             ]})
 
         return data
diff --git a/arkindex/documents/serializers/ml.py b/arkindex/documents/serializers/ml.py
index ee19f0b8c7d865f45a9d85b06a2e1f1412c2b517..daa3aed4dfe14d93e447c2d2b3ea5f6f0833957f 100644
--- a/arkindex/documents/serializers/ml.py
+++ b/arkindex/documents/serializers/ml.py
@@ -254,10 +254,7 @@ class ClassificationsSelectionSerializer(serializers.ModelSerializer):
             if ml_class is None:
                 raise ValidationError({"ml_class": "Ml class {} not found".format(data["ml_class"])})
             if ml_class.corpus.id != corpus.id:
-                raise ValidationError("Ml class {} does not belong to the corpus {}".format(
-                    ml_class,
-                    corpus
-                ))
+                raise ValidationError(f"Ml class {ml_class} does not belong to the corpus {corpus}")
         return data
 
 
diff --git a/arkindex/documents/tests/test_bulk_classification.py b/arkindex/documents/tests/test_bulk_classification.py
index ce5ab5e7d739c82a33ee9bbbcff77d2f338af1c7..60f82ea19b14bc9891496c53e80a88cb54ba1595 100644
--- a/arkindex/documents/tests/test_bulk_classification.py
+++ b/arkindex/documents/tests/test_bulk_classification.py
@@ -56,7 +56,7 @@ class TestBulkClassification(FixtureAPITestCase):
         self.assertDictEqual(
             response.json(),
             {
-                "parent": ['Invalid pk "{}" - object does not exist.'.format(private_page.id)]
+                "parent": [f'Invalid pk "{private_page.id}" - object does not exist.']
             }
         )
 
diff --git a/arkindex/documents/tests/test_classes.py b/arkindex/documents/tests/test_classes.py
index 0f4f7291a6541755497efbeb562c160a5d55762f..20409bbcc53b93f679a3e3562b6ad42b178815f3 100644
--- a/arkindex/documents/tests/test_classes.py
+++ b/arkindex/documents/tests/test_classes.py
@@ -29,7 +29,7 @@ class TestClasses(FixtureAPITestCase):
 
         for elt_num in range(1, 13):
             elt = cls.corpus.elements.create(
-                name="elt_{}".format(elt_num),
+                name=f"elt_{elt_num}",
                 type=cls.classified,
             )
             elt.add_parent(cls.parent)
diff --git a/arkindex/documents/tests/test_element_paths_api.py b/arkindex/documents/tests/test_element_paths_api.py
index f71d1298e392a981604640bd7992651106c434a3..290d2ce63ee22a2c9874b1f6cffd460156fe9f05 100644
--- a/arkindex/documents/tests/test_element_paths_api.py
+++ b/arkindex/documents/tests/test_element_paths_api.py
@@ -78,7 +78,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"child": ['Invalid pk "{}" - object does not exist.'.format(self.desk.id)]}
+            {"child": [f'Invalid pk "{self.desk.id}" - object does not exist.']}
         )
 
     def test_different_corpus(self):
@@ -94,7 +94,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["Parent is not from corpus '{}'".format(self.room.corpus.name)]}
+            {"parent": [f"Parent is not from corpus '{self.room.corpus.name}'"]}
         )
 
     def test_own_parent(self):
@@ -116,7 +116,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["'{}' is already a parent of '{}'".format(self.room.id, self.desk.id)]}
+            {"parent": [f"'{self.room.id}' is already a parent of '{self.desk.id}'"]}
         )
 
     def test_cycles(self):
@@ -129,7 +129,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["'{}' is a child of element '{}'".format(self.room.id, self.house.id)]}
+            {"parent": [f"'{self.room.id}' is a child of element '{self.house.id}'"]}
         )
 
     def test_delete_forbidden(self):
@@ -147,7 +147,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"child": ['Invalid pk "{}" - object does not exist.'.format(self.desk.id)]}
+            {"child": [f'Invalid pk "{self.desk.id}" - object does not exist.']}
         )
 
     def test_delete_relation(self):
diff --git a/arkindex/images/management/commands/check_images.py b/arkindex/images/management/commands/check_images.py
index a924b16a81e424a54625e00ecdab36277df99a38..9a630e9c4ec9e160caaae793b1564728de910718 100644
--- a/arkindex/images/management/commands/check_images.py
+++ b/arkindex/images/management/commands/check_images.py
@@ -59,7 +59,7 @@ class Command(BaseCommand):
                 server_sample = server.images \
                                       .filter(status=S3FileStatus.Checked) \
                                       .order_by("?")[:sample]
-                self.stdout.write("Re-checking {} images in server {}".format(len(server_sample), server.display_name))
+                self.stdout.write(f"Re-checking {len(server_sample)} images in server {server.display_name}")
                 self.check(server_sample)
         self.check(images)
 
@@ -67,7 +67,7 @@ class Command(BaseCommand):
         successful, failed = 0, 0
 
         for image in images:
-            self.stdout.write("Checking image {} at {}".format(str(image.id), image.url))
+            self.stdout.write(f"Checking image {image.id} at {image.url}")
             image.perform_check(save=True)
             if image.status == S3FileStatus.Checked:
                 successful += 1
diff --git a/arkindex/images/models.py b/arkindex/images/models.py
index 9e5e4270098cc8b9f931633152db2d772c9a342c..f2ab13933deb0a645586c3ad543c9b9ff9bf574f 100644
--- a/arkindex/images/models.py
+++ b/arkindex/images/models.py
@@ -156,11 +156,11 @@ class ImageServer(models.Model):
         folder_slash = folder.lstrip("/") + "/"
         images = self.images.filter(path__startswith=folder_slash)
         if not images.exists():
-            raise ValueError('No images are in the "{}" folder'.format(folder))
+            raise ValueError(f'No images are in the "{folder}" folder')
 
         # Create the new server on the subfolder
         new_server = ImageServer.objects.create(
-            display_name="{}_{}".format(self.display_name, slugify(folder)),
+            display_name=f"{self.display_name}_{slugify(folder)}",
             url=urllib.parse.urljoin(self.url.lstrip("/") + "/", folder),
         )
 
@@ -359,7 +359,7 @@ class Thumbnail(S3FileMixin):
 
     @property
     def name(self):
-        return "{}.jpg".format(str(self.element.id.hex))
+        return f"{self.element.id.hex}.jpg"
 
     @cached_property
     def s3_key(self):
diff --git a/arkindex/images/tests/test_image_api.py b/arkindex/images/tests/test_image_api.py
index c66e1052c3ef9cf03e417d91edf82c188ac46237..edbeb945c12a910c501a4e0dd17d71b97602827d 100644
--- a/arkindex/images/tests/test_image_api.py
+++ b/arkindex/images/tests/test_image_api.py
@@ -35,7 +35,7 @@ class TestImageApi(FixtureAPITestCase):
     def setUp(self):
         super().setUp()
         # A random 32-character hex string
-        self.image_hash = "{:032x}".format(random.randrange(16**32))
+        self.image_hash = f"{random.randrange(16**32):032x}"
 
     def test_create_image_requires_login(self):
         with self.assertNumQueries(0):
@@ -164,7 +164,7 @@ class TestImageApi(FixtureAPITestCase):
     def test_create_image_unique_path(self):
         self.client.force_login(self.superuser)
         existing_image = self.imgsrv.images.create(path="something", hash=self.image_hash)
-        new_hash = "{:032x}".format(random.randrange(16**32))
+        new_hash = f"{random.randrange(16**32):032x}"
 
         with self.assertNumQueries(4):
             response = self.client.post(reverse("api:image-create"), {"hash": new_hash, "path": "something"})
diff --git a/arkindex/images/views.py b/arkindex/images/views.py
index b685c604d6e76ef9f9d85b7e50a11a6c5629c125..e2f05e3b7fb3525d76b96ab8b1299346ef765e9a 100644
--- a/arkindex/images/views.py
+++ b/arkindex/images/views.py
@@ -43,7 +43,7 @@ class ImageServerMergeView(ImageServerAdminView):
         except ValueError as e:
             self.model_admin.message_user(
                 self.request,
-                "Merging failed: {}".format(str(e)),
+                f"Merging failed: {e}",
                 level=messages.ERROR,
             )
             return super().form_valid(form)
@@ -52,7 +52,7 @@ class ImageServerMergeView(ImageServerAdminView):
 
         self.model_admin.message_user(
             self.request,
-            "Successfully merged {} into {}".format(source, destination),
+            f"Successfully merged {source} into {destination}",
             level=messages.SUCCESS,
         )
         return super().form_valid(form)
@@ -70,14 +70,14 @@ class ImageServerSplitView(ImageServerAdminView):
         except ValueError as e:
             self.model_admin.message_user(
                 self.request,
-                "Splitting failed: {}".format(str(e)),
+                f"Splitting failed: {e}",
                 level=messages.ERROR,
             )
             return super().form_valid(form)
 
         self.model_admin.message_user(
             self.request,
-            'Successfully split "{}" from {} into {}'.format(folder, self.server, new_server),
+            f'Successfully split "{folder}" from {self.server} into {new_server}',
             level=messages.SUCCESS,
         )
         return super().form_valid(form)
diff --git a/arkindex/ponos/models.py b/arkindex/ponos/models.py
index 76a91f8a152761d566d025ff9f2558f3db98fd6f..1618e3e632a74dd7e8d0a0b973a260d13a78750f 100644
--- a/arkindex/ponos/models.py
+++ b/arkindex/ponos/models.py
@@ -22,7 +22,7 @@ from arkindex.project.validators import MaxValueValidator
 
 
 def generate_seed() -> str:
-    return "{:064x}".format(random.getrandbits(256))
+    return f"{random.getrandbits(256):064x}"
 
 
 def gen_nonce(size=16):
@@ -60,7 +60,7 @@ class Farm(models.Model):
         ]
 
     def __str__(self) -> str:
-        return "Farm {}".format(self.name)
+        return f"Farm {self.name}"
 
     def is_available(self, user) -> bool:
         return True
@@ -250,7 +250,7 @@ class TaskLogs(S3FileMixin):
 
         try:
             log_bytes = self.s3_object.get(
-                Range="bytes=-{}".format(max_length),
+                Range=f"bytes=-{max_length}",
             )["Body"].read()
 
         except ClientError as e:
@@ -376,7 +376,7 @@ class Task(models.Model):
         ]
 
     def __str__(self) -> str:
-        return "Task {}, run {}, depth {}".format(self.slug, self.run, self.depth)
+        return f"Task {self.slug}, run {self.run}, depth {self.depth}"
 
     def get_absolute_url(self) -> str:
         """
diff --git a/arkindex/ponos/tasks.py b/arkindex/ponos/tasks.py
index b4744c1a0134e57e14ea402fce88598b88ec57ed..f568d59eaaa807681a9c52ca656175e9e450aad9 100644
--- a/arkindex/ponos/tasks.py
+++ b/arkindex/ponos/tasks.py
@@ -181,14 +181,12 @@ def run_docker_task(client, task, temp_dir):
 
     # 4. Download extra_files
     if task.extra_files:
-        logger.info("Downloading extra_files for task {!s}".format(task))
+        logger.info(f"Downloading extra_files for task {task!s}")
         try:
             download_extra_files(task, temp_dir)
         except Exception as e:
             logger.warning(
-                "Failed downloading extra_files for task {!s}: {!s}".format(
-                    task, e
-                )
+                f"Failed downloading extra_files for task {task!s}: {e!s}"
             )
             task.state = State.Error
             task.save()
diff --git a/arkindex/ponos/tests/test_models.py b/arkindex/ponos/tests/test_models.py
index 8492b0776579a0a771efcfc9d045bf8cdc1182a5..8b8ff878e1b94e8065db4134bba56ac3f6f62def 100644
--- a/arkindex/ponos/tests/test_models.py
+++ b/arkindex/ponos/tests/test_models.py
@@ -30,17 +30,17 @@ class TestModels(FixtureAPITestCase):
             self.task1.save()
             if state in FINAL_STATES:
                 self.assertTrue(
-                    self.task1.is_final(), msg="{} should be final".format(state)
+                    self.task1.is_final(), msg=f"{state} should be final"
                 )
                 self.assertTrue(
-                    self.process.is_final, msg="{} should be final".format(state)
+                    self.process.is_final, msg=f"{state} should be final"
                 )
             else:
                 self.assertFalse(
-                    self.task1.is_final(), msg="{} should not be final".format(state)
+                    self.task1.is_final(), msg=f"{state} should not be final"
                 )
                 self.assertFalse(
-                    self.process.is_final, msg="{} should not be final".format(state)
+                    self.process.is_final, msg=f"{state} should not be final"
                 )
 
     def test_requires_gpu(self):
diff --git a/arkindex/process/api.py b/arkindex/process/api.py
index 6b6e2416bf7dd8acb5729bae13f54091e02ae709..09703baa6e5ba1d0e679837deef2ee6da6fbc82d 100644
--- a/arkindex/process/api.py
+++ b/arkindex/process/api.py
@@ -189,7 +189,7 @@ class ProcessList(ProcessACLMixin, ListAPIView):
             try:
                 corpus_id = UUID(corpus_id)
             except (AttributeError, ValueError):
-                raise ValidationError({"corpus": ["'{}' is not a valid UUID".format(corpus_id)]})
+                raise ValidationError({"corpus": [f"'{corpus_id}' is not a valid UUID"]})
             # No supplementary validation is required on the corpus ID filter
             filters &= Q(corpus=corpus_id)
 
diff --git a/arkindex/process/models.py b/arkindex/process/models.py
index 8e8dff5625624eb63d18d28b6c3644ac7f529396..1e21d56ef2d584a93ebbaaf71e7ed3ccf8fe71ab 100644
--- a/arkindex/process/models.py
+++ b/arkindex/process/models.py
@@ -586,8 +586,8 @@ class Revision(IndexableModel):
         return "{}/commit/{}".format(self.repo.url.rstrip("/"), self.hash)
 
     def __str__(self):
-        message = ' "{}"'.format(self.message.splitlines()[0]) if self.message else ""
-        return "{}{} by {}".format(self.hash[:8], message, self.author)
+        message = f' "{self.message.splitlines()[0]}"' if self.message else ""
+        return f"{self.hash[:8]}{message} by {self.author}"
 
 
 class Worker(models.Model):
diff --git a/arkindex/process/serializers/imports.py b/arkindex/process/serializers/imports.py
index 23e601754682282d108caba5b82ca3d5cea3180f..2b7ae9c8a8fb7f5fa8ce382449649ebf2fc7e5e2 100644
--- a/arkindex/process/serializers/imports.py
+++ b/arkindex/process/serializers/imports.py
@@ -670,7 +670,7 @@ class CorpusProcessSerializer(serializers.Serializer):
         # If element is defined ensure it is part of the right corpus
         if element and element.corpus_id != corpus.id:
             raise ValidationError({
-                "element": ["Element is not part of corpus {}".format(corpus.name)]
+                "element": [f"Element is not part of corpus {corpus.name}"]
             })
 
         # Check type filter is valid
diff --git a/arkindex/project/argparse.py b/arkindex/project/argparse.py
index 66cbd6946973ecba1d31638689fcbe2644d5c474..d308011199035225000a71e18d1ec98bfdafe4ec 100644
--- a/arkindex/project/argparse.py
+++ b/arkindex/project/argparse.py
@@ -27,9 +27,9 @@ class ModelArgument:
         try:
             return self.text_search(qs, arg)
         except self.model.DoesNotExist:
-            raise CommandError('{} "{}" does not exist'.format(self.model.__name__, arg))
+            raise CommandError(f'{self.model.__name__} "{arg}" does not exist')
         except self.model.MultipleObjectsReturned:
-            raise CommandError('"{}" matches multiple {} instances'.format(arg, self.model.__name__))
+            raise CommandError(f'"{arg}" matches multiple {self.model.__name__} instances')
 
     def text_search(self, qs, arg):
         if not self.text_search_field:
diff --git a/arkindex/project/aws.py b/arkindex/project/aws.py
index 1bde384677f18d635a5268a8618136d21156570a..ac98c858d6c621278ace564a9d2e674143b78de1 100644
--- a/arkindex/project/aws.py
+++ b/arkindex/project/aws.py
@@ -142,12 +142,12 @@ class S3FileMixin:
 
     def download(self):
         b = BytesIO()
-        logger.debug("Downloading file {} from S3".format(self.s3_key))
+        logger.debug(f"Downloading file {self.s3_key} from S3")
         self.s3_object.download_fileobj(b)
         return b
 
     def download_to(self, path):
-        logger.debug("Downloading file {} from S3".format(self.s3_key))
+        logger.debug(f"Downloading file {self.s3_key} from S3")
         self.s3_object.download_file(path)
 
     def check_hash(self, save=True, raise_exc=False):
@@ -162,10 +162,7 @@ class S3FileMixin:
         elif "-" in self.s3_object.e_tag:
             # Multipart hash: a hash of each part's hash,
             # combined with the number of parts, separated by a dash
-            logger.warning("Could not check remote multipart hash {!r} against local hash {!r}".format(
-                self.s3_object.e_tag,
-                self.hash,
-            ))
+            logger.warning(f"Could not check remote multipart hash {self.s3_object.e_tag!r} against local hash {self.hash!r}")
             self.status = S3FileStatus.Unchecked
         else:
             self.status = S3FileStatus.Error
diff --git a/arkindex/project/permissions.py b/arkindex/project/permissions.py
index de38572783b7ae84e1632c49bcaa875b9153fa75..ee36346dcc7e4665aa298be8b715c699d2de75f8 100644
--- a/arkindex/project/permissions.py
+++ b/arkindex/project/permissions.py
@@ -17,7 +17,7 @@ def require_verified_email(request, view):
 
 
 def _get_scopes(request, view):
-    specific_scopes_attr = "{}_scopes".format(request.method.lower())
+    specific_scopes_attr = f"{request.method.lower()}_scopes"
     scopes = list(getattr(view, "scopes", []))
     scopes.extend(getattr(view, specific_scopes_attr, []))
     return set(scopes)
diff --git a/arkindex/project/serializer_fields.py b/arkindex/project/serializer_fields.py
index 06245b86fc0ef12f757846b56b71927b684d87bd..9a16ad44029637872353df68fbffa3d2cd303430 100644
--- a/arkindex/project/serializer_fields.py
+++ b/arkindex/project/serializer_fields.py
@@ -35,7 +35,7 @@ class EnumField(serializers.ChoiceField):
         try:
             return self.enum(data)
         except ValueError:
-            raise serializers.ValidationError("Value is not of type {}".format(self.enum.__name__))
+            raise serializers.ValidationError(f"Value is not of type {self.enum.__name__}")
 
 
 class PointField(serializers.ListField):
diff --git a/arkindex/project/settings.py b/arkindex/project/settings.py
index ee1cd960077868fdf019eff6cd76fefba3fe7ff4..928955b1c6158692ec70e83ad331a0d2513a57d3 100644
--- a/arkindex/project/settings.py
+++ b/arkindex/project/settings.py
@@ -413,7 +413,7 @@ LOGGING = {
 }
 
 # Email
-EMAIL_SUBJECT_PREFIX = "[Arkindex {}] ".format(ARKINDEX_ENV)
+EMAIL_SUBJECT_PREFIX = f"[Arkindex {ARKINDEX_ENV}] "
 if conf["email"]:
     ADMINS = [("", address) for address in conf["email"]["error_report_recipients"]]
     EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
@@ -459,7 +459,7 @@ CORS_URLS_REGEX = r"^/(api|ponos)/.*$"
 # Support CORS suffixes
 if conf["cors"]["suffixes"]:
     CORS_ALLOWED_ORIGIN_REGEXES = [
-        r"^https://.+{}".format(suffix)
+        rf"^https://.+{suffix}"
         for suffix in conf["cors"]["suffixes"]
     ]
 
diff --git a/arkindex/training/models.py b/arkindex/training/models.py
index ea84bd5c8af8588881188e9580dba7bd64b519ac..028ed4071c90bf32a9ecf10911868187d9527f2c 100644
--- a/arkindex/training/models.py
+++ b/arkindex/training/models.py
@@ -169,10 +169,7 @@ class ModelVersion(S3FileMixin, IndexableModel):
         elif "-" in self.s3_object.e_tag:
             # Multipart hash: a hash of each part's hash,
             # combined with the number of parts, separated by a dash
-            logger.warning("Could not check remote multipart hash {!r} against local hash {!r}".format(
-                self.s3_object.e_tag,
-                self.archive_hash,
-            ))
+            logger.warning(f"Could not check remote multipart hash {self.s3_object.e_tag!r} against local hash {self.archive_hash!r}")
             self.state = ModelVersionState.Available
         else:
             self.state = ModelVersionState.Error
diff --git a/arkindex/users/api.py b/arkindex/users/api.py
index 467fd2e7283111cbab05450426c502e5c3ad0ee7..5bc55e012c13abb97df750e9469ab14de34420a2 100644
--- a/arkindex/users/api.py
+++ b/arkindex/users/api.py
@@ -121,7 +121,7 @@ class UserCreate(CreateAPIView):
             fail_silently=True,
         )
         if sent == 0:
-            logger.error("Failed to send registration email to {}".format(user.email))
+            logger.error(f"Failed to send registration email to {user.email}")
 
         return Response(UserSerializer(user).data, status=status.HTTP_201_CREATED)
 
diff --git a/setup.py b/setup.py
index 39f437cdac5d6275d6dfacd06032a7f5f7e0819c..fdd40bb80d4261c25a3fc17db605abceb2aa2cba 100755
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ def _parse_requirement(line):
 
 
 def requirements(path):
-    assert os.path.exists(path), "Missing requirements {}".format(path)
+    assert os.path.exists(path), f"Missing requirements {path}"
     with open(path) as f:
         return list(map(_parse_requirement, f.read().splitlines()))