From 098024023b5e6f0af238c32bcbdc83c11ee959aa Mon Sep 17 00:00:00 2001
From: Yoann Schneider <yschneider@teklia.com>
Date: Wed, 15 May 2024 07:39:15 +0000
Subject: [PATCH] Introduce pyupgrade

---
 arkindex/documents/date_parser.py                  |  6 +++---
 arkindex/documents/dates.py                        | 14 +++++++-------
 arkindex/documents/indexer.py                      |  2 +-
 .../management/commands/move_lines_to_parent.py    |  9 ++++++---
 arkindex/documents/managers.py                     | 12 ++++++------
 arkindex/documents/models.py                       |  8 +++-----
 arkindex/documents/serializers/elements.py         | 14 +++++++-------
 arkindex/documents/serializers/ml.py               |  5 +----
 .../documents/tests/commands/test_load_export.py   |  2 +-
 .../documents/tests/test_bulk_classification.py    |  2 +-
 arkindex/documents/tests/test_classes.py           |  2 +-
 arkindex/documents/tests/test_edit_elementpath.py  |  2 +-
 arkindex/documents/tests/test_element_paths_api.py | 10 +++++-----
 .../images/management/commands/check_images.py     |  4 ++--
 arkindex/images/models.py                          | 10 +++++-----
 arkindex/images/tests/test_image_api.py            |  4 ++--
 arkindex/images/views.py                           |  8 ++++----
 arkindex/ponos/models.py                           |  8 ++++----
 arkindex/ponos/serializer_fields.py                |  2 +-
 arkindex/ponos/tasks.py                            |  6 ++----
 arkindex/ponos/tests/test_models.py                |  8 ++++----
 arkindex/process/api.py                            |  4 ++--
 arkindex/process/builder.py                        |  2 +-
 arkindex/process/models.py                         |  4 ++--
 arkindex/process/serializers/imports.py            |  2 +-
 .../process/tests/test_workeractivity_stats.py     |  8 ++++----
 arkindex/project/argparse.py                       |  6 +++---
 arkindex/project/aws.py                            | 11 ++++-------
 arkindex/project/fields.py                         |  4 ++--
 arkindex/project/mixins.py                         |  8 ++++----
 arkindex/project/permissions.py                    |  2 +-
 arkindex/project/serializer_fields.py              |  2 +-
 arkindex/project/settings.py                       |  4 ++--
 arkindex/project/tests/__init__.py                 |  4 ++--
 arkindex/project/validators.py                     |  6 +++---
 arkindex/training/api.py                           |  2 +-
 .../management/commands/migrate_workers.py         |  2 +-
 arkindex/training/models.py                        |  5 +----
 arkindex/training/tests/test_datasets_api.py       |  4 ++--
 arkindex/users/api.py                              |  2 +-
 arkindex/users/tests/test_jobs.py                  |  4 ++--
 ruff.toml                                          |  2 ++
 setup.py                                           |  2 +-
 43 files changed, 110 insertions(+), 118 deletions(-)

diff --git a/arkindex/documents/date_parser.py b/arkindex/documents/date_parser.py
index 20860e2ba6..a12121c56b 100644
--- a/arkindex/documents/date_parser.py
+++ b/arkindex/documents/date_parser.py
@@ -143,7 +143,7 @@ def instanciate_date(date_elt):
     try:
         date.validate()
     except ValueError as e:
-        logger.warning("Date fields are incorrect: {}".format(e))
+        logger.warning(f"Date fields are incorrect: {e}")
         raise
     return date
 
@@ -161,6 +161,6 @@ def parse_date(raw_date, functions_table=DATE_FUNCTIONS_TABLE):
             if date_elts:
                 return tuple(map(instanciate_date, date_elts))
         except Exception:
-            logger.warning("Failed parsing {} with function {}".format(raw_date, f.__name__))
-    logger.warning("Date not supported: {}".format(raw_date))
+            logger.warning(f"Failed parsing {raw_date} with function {f.__name__}")
+    logger.warning(f"Date not supported: {raw_date}")
     return ()
diff --git a/arkindex/documents/dates.py b/arkindex/documents/dates.py
index dc3012bef8..0c759b7a36 100644
--- a/arkindex/documents/dates.py
+++ b/arkindex/documents/dates.py
@@ -16,7 +16,7 @@ class DatePrecision(Enum):
     Day = "d"
 
 
-class InterpretedDate(object):
+class InterpretedDate:
 
     def __init__(self, year, month=None, day=None, type=DateType.Exact):
         self.year = int(year)
@@ -26,17 +26,17 @@ class InterpretedDate:
 
     def validate(self):
         if self.year < 0:
-            raise ValueError("Year {} is negative".format(self.year))
+            raise ValueError(f"Year {self.year} is negative")
         if self.month and (self.month < 1 or self.month > 12):
-            raise ValueError("Month {} is not between 1 and 12".format(self.month))
+            raise ValueError(f"Month {self.month} is not between 1 and 12")
         if self.day and (self.day < 1 or self.day > 31):
-            raise ValueError("Day {} is not between 1 and 31".format(self.day))
+            raise ValueError(f"Day {self.day} is not between 1 and 31")
         # Check if day is correct depending on year and month
         if self.precision == DatePrecision.Day:
             try:
                 datetime(*tuple(self))
             except ValueError:
-                raise ValueError("Date format is incorrect {}".format(self))
+                raise ValueError(f"Date format is incorrect {self}")
 
     @property
     def precision(self):
@@ -70,10 +70,10 @@ class InterpretedDate:
         return s > o
 
     def __str__(self):
-        return "-".join("{:02d}".format(e) for e in tuple(self) if e)
+        return "-".join(f"{e:02d}" for e in tuple(self) if e)
 
 
-class InterpretedDateMixin(object):
+class InterpretedDateMixin:
     """
     Adds on-demand date parsing from a text field to InterpretedDates.
     Requires a `raw_dates` property that returns the date string.
diff --git a/arkindex/documents/indexer.py b/arkindex/documents/indexer.py
index 203722ec31..37f7e200e4 100644
--- a/arkindex/documents/indexer.py
+++ b/arkindex/documents/indexer.py
@@ -59,7 +59,7 @@ INNER JOIN documents_elementtype elementtype ON (element.type_id = elementtype.i
 """
 
 
-class Indexer(object):
+class Indexer:
 
     # The query yielding all the elements to run on will look for all the child elements of all indexable elements
     # The joins can take a very long time, so the query gets split into one to fetch all the indexable elements,
diff --git a/arkindex/documents/management/commands/move_lines_to_parent.py b/arkindex/documents/management/commands/move_lines_to_parent.py
index 2927bc6b39..dd393f6113 100644
--- a/arkindex/documents/management/commands/move_lines_to_parent.py
+++ b/arkindex/documents/management/commands/move_lines_to_parent.py
@@ -6,10 +6,13 @@ from django.core.management.base import BaseCommand, CommandError
 from arkindex.documents.models import ElementType
 from arkindex.project.argparse import CorpusArgument
 
+
 # x and y of top left and bottom right points
-BBox = NamedTuple(
-    "BBox", [("left", int), ("top", int), ("right", int), ("bottom", int)]
-)
+class BBox(NamedTuple):
+    left: int
+    top: int
+    right: int
+    bottom: int
 
 
 def compute_polygon_area(polygon: BBox):
diff --git a/arkindex/documents/managers.py b/arkindex/documents/managers.py
index d09737577c..9ccb6494b8 100644
--- a/arkindex/documents/managers.py
+++ b/arkindex/documents/managers.py
@@ -47,13 +47,13 @@ class ElementQuerySet(models.QuerySet):
         # by including the target SQL query, and joining it directly with paths
         # It's not possible to do that join with Django ORM
         with connections["default"].cursor() as cursor:
-            cursor.execute("""select min(length), max(length) FROM (
+            cursor.execute(f"""select min(length), max(length) FROM (
                 select array_length(p.path, 1) as length
                 from documents_elementpath as p
                 inner join
-                ({}) as input on (array[input.id] && p.path)
+                ({sql}) as input on (array[input.id] && p.path)
                 ) as lengths
-            """.format(sql), params)
+            """, params)
             min_paths, max_paths = cursor.fetchone()
 
             # Postgres will give us None when no children is found
@@ -86,16 +86,16 @@ class ElementQuerySet(models.QuerySet):
             # directly into an SQL DELETE statement for paths
            # Once paths are deleted, we can finally delete the targeted elements
             with connections["default"].cursor() as cursor:
-                cursor.execute("""
+                cursor.execute(f"""
                     WITH element_ids (id) AS (
                         DELETE FROM documents_elementpath
-                        WHERE element_id IN ({})
+                        WHERE element_id IN ({sql})
                         RETURNING element_id
                     )
                     DELETE FROM documents_element element
                     USING element_ids
                     WHERE element.id = element_ids.id
-                """.format(sql), params)
+                """, params)
 
         # Finally, delete top elements.
         # Ensure the QuerySet does not use a DISTINCT; it is useless for a deletion, and since Django 3.2,
diff --git a/arkindex/documents/models.py b/arkindex/documents/models.py
index 5d0109d11b..8f981de972 100644
--- a/arkindex/documents/models.py
+++ b/arkindex/documents/models.py
@@ -688,7 +688,7 @@ class Element(IndexableModel):
         )
 
     def __str__(self):
-        return "{}: {}".format(self.type.display_name, self.name)
+        return f"{self.type.display_name}: {self.name}"
 
 
 class EntityType(models.Model):
@@ -835,7 +835,7 @@ class Transcription(models.Model):
         ]
 
     def __str__(self):
-        return "Transcription: {}".format(self.text[:20])
+        return f"Transcription: {self.text[:20]}"
 
 
 class TranscriptionEntity(models.Model):
@@ -1120,9 +1120,7 @@ class MetaData(InterpretedDateMixin, models.Model):
         if self.entity is None or self.element is None:
             return
         if self.entity.corpus != self.element.corpus:
-            raise ValidationError("Entity's corpus {} is different from the expected corpus {}".format(
-                self.entity.corpus,
-                self.element.corpus))
+            raise ValidationError(f"Entity's corpus {self.entity.corpus} is different from the expected corpus {self.element.corpus}")
 
     def save(self, *args, **kwargs):
         self.clean()
diff --git a/arkindex/documents/serializers/elements.py b/arkindex/documents/serializers/elements.py
index d8ba836bed..b7d70eb785 100644
--- a/arkindex/documents/serializers/elements.py
+++ b/arkindex/documents/serializers/elements.py
@@ -500,7 +500,7 @@ class ElementParentSerializer(serializers.Serializer):
         parent = data.get("parent")
 
         if parent.corpus_id != child.corpus_id:
-            errors["parent"].append("Parent is not from corpus '{}'".format(child.corpus.name))
+            errors["parent"].append(f"Parent is not from corpus '{child.corpus.name}'")
         if parent.id == child.id:
             errors["parent"].append("A child cannot be its own parent")
         if errors:
@@ -511,12 +511,12 @@ class ElementParentSerializer(serializers.Serializer):
         # Assert parent is not an child's ancestor already
         if ElementPath.objects.filter(element_id=child.id, path__contains=[parent.id]).exists():
             raise ValidationError({"parent": [
-                "'{}' is already a parent of '{}'".format(parent.id, child.id)
+                f"'{parent.id}' is already a parent of '{child.id}'"
             ]})
         # Assert parent is not an alement's descendant
         if ElementPath.objects.filter(element_id=parent.id, path__contains=[child.id]).exists():
             raise ValidationError({"parent": [
-                "'{}' is a child of element '{}'".format(parent.id, child.id)
+                f"'{parent.id}' is a child of element '{child.id}'"
             ]})
 
     def create(self, validated_data):
@@ -1006,12 +1006,12 @@ class ElementDestinationSerializer(serializers.Serializer):
         # Assert destination is not a source's direct ancestor already
         if ElementPath.objects.filter(element_id=source.id, path__last=destination.id).exists():
             raise ValidationError({"destination": [
-                "'{}' is already a direct parent of '{}'".format(destination.id, source.id)
+                f"'{destination.id}' is already a direct parent of '{source.id}'"
             ]})
         # Assert destination is not a source's descendant
         if ElementPath.objects.filter(element_id=destination.id, path__contains=[source.id]).exists():
             raise ValidationError({"destination": [
-                "'{}' is a child of element '{}'".format(destination.id, source.id)
+                f"'{destination.id}' is a child of element '{source.id}'"
             ]})
 
 
@@ -1048,12 +1048,12 @@ class SelectionMoveSerializer(serializers.Serializer, SelectionMixin):
         # Assert destination is not a source's direct ancestor already
         if ElementPath.objects.filter(element__corpus_id=corpus.id, element__selections__user_id=self.context["request"].user.id, path__last=destination.id).exists():
             raise ValidationError({"destination": [
-                "'{}' is already a direct parent of one or more selected elements.".format(destination.id)
+                f"'{destination.id}' is already a direct parent of one or more selected elements."
             ]})
         # Assert destination is not a source's descendant
         if destination.paths.filter(path__overlap=Array(selected_elements.values_list("id", flat=True))).exists():
             raise ValidationError({"destination": [
-                "'{}' is a child of one or more selected elements.".format(destination.id)
+                f"'{destination.id}' is a child of one or more selected elements."
             ]})
 
         return data
diff --git a/arkindex/documents/serializers/ml.py b/arkindex/documents/serializers/ml.py
index ee19f0b8c7..daa3aed4df 100644
--- a/arkindex/documents/serializers/ml.py
+++ b/arkindex/documents/serializers/ml.py
@@ -254,10 +254,7 @@ class ClassificationsSelectionSerializer(serializers.ModelSerializer):
         if ml_class is None:
             raise ValidationError({"ml_class": "Ml class {} not found".format(data["ml_class"])})
         if ml_class.corpus.id != corpus.id:
-            raise ValidationError("Ml class {} does not belong to the corpus {}".format(
-                ml_class,
-                corpus
-            ))
+            raise ValidationError(f"Ml class {ml_class} does not belong to the corpus {corpus}")
 
         return data
 
diff --git a/arkindex/documents/tests/commands/test_load_export.py b/arkindex/documents/tests/commands/test_load_export.py
index fe7176f77b..3bf8e45290 100644
--- a/arkindex/documents/tests/commands/test_load_export.py
+++ b/arkindex/documents/tests/commands/test_load_export.py
@@ -54,7 +54,7 @@ class TestLoadExport(FixtureTestCase):
             "documents.classification": ["ml_class"],
         }
 
-        with open(path, "r") as file:
+        with open(path) as file:
             data = json.loads(file.read())
 
         results = []
diff --git a/arkindex/documents/tests/test_bulk_classification.py b/arkindex/documents/tests/test_bulk_classification.py
index ce5ab5e7d7..60f82ea19b 100644
--- a/arkindex/documents/tests/test_bulk_classification.py
+++ b/arkindex/documents/tests/test_bulk_classification.py
@@ -56,7 +56,7 @@ class TestBulkClassification(FixtureAPITestCase):
         self.assertDictEqual(
             response.json(),
             {
-                "parent": ['Invalid pk "{}" - object does not exist.'.format(private_page.id)]
+                "parent": [f'Invalid pk "{private_page.id}" - object does not exist.']
             }
         )
 
diff --git a/arkindex/documents/tests/test_classes.py b/arkindex/documents/tests/test_classes.py
index 0f4f7291a6..20409bbcc5 100644
--- a/arkindex/documents/tests/test_classes.py
+++ b/arkindex/documents/tests/test_classes.py
@@ -29,7 +29,7 @@ class TestClasses(FixtureAPITestCase):
 
         for elt_num in range(1, 13):
             elt = cls.corpus.elements.create(
-                name="elt_{}".format(elt_num),
+                name=f"elt_{elt_num}",
                 type=cls.classified,
             )
             elt.add_parent(cls.parent)
diff --git a/arkindex/documents/tests/test_edit_elementpath.py b/arkindex/documents/tests/test_edit_elementpath.py
index ac0f945e98..5633763cea 100644
--- a/arkindex/documents/tests/test_edit_elementpath.py
+++ b/arkindex/documents/tests/test_edit_elementpath.py
@@ -358,7 +358,7 @@ class TestEditElementPath(FixtureTestCase):
         # B will only have one remaining path: the first path that got picked by remove_child for the update.
         # The other path will have been deleted. We can therefore get this remaining path and compare it by its ID
         # to the two paths that we had before, and pick the parent that was in the old version of this path.
-        class FirstParent(object):
+        class FirstParent:
             def __str__(self):
                 path_id = elements["B"].paths.get().id
                 if path1.id == path_id:
diff --git a/arkindex/documents/tests/test_element_paths_api.py b/arkindex/documents/tests/test_element_paths_api.py
index f71d1298e3..290d2ce63e 100644
--- a/arkindex/documents/tests/test_element_paths_api.py
+++ b/arkindex/documents/tests/test_element_paths_api.py
@@ -78,7 +78,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"child": ['Invalid pk "{}" - object does not exist.'.format(self.desk.id)]}
+            {"child": [f'Invalid pk "{self.desk.id}" - object does not exist.']}
         )
 
     def test_different_corpus(self):
@@ -94,7 +94,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["Parent is not from corpus '{}'".format(self.room.corpus.name)]}
+            {"parent": [f"Parent is not from corpus '{self.room.corpus.name}'"]}
         )
 
     def test_own_parent(self):
@@ -116,7 +116,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["'{}' is already a parent of '{}'".format(self.room.id, self.desk.id)]}
+            {"parent": [f"'{self.room.id}' is already a parent of '{self.desk.id}'"]}
         )
 
     def test_cycles(self):
@@ -129,7 +129,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"parent": ["'{}' is a child of element '{}'".format(self.room.id, self.house.id)]}
+            {"parent": [f"'{self.room.id}' is a child of element '{self.house.id}'"]}
         )
 
     def test_delete_forbidden(self):
@@ -147,7 +147,7 @@ class TestElementsAPI(FixtureAPITestCase):
         self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
         self.assertDictEqual(
             response.json(),
-            {"child": ['Invalid pk "{}" - object does not exist.'.format(self.desk.id)]}
+            {"child": [f'Invalid pk "{self.desk.id}" - object does not exist.']}
         )
 
     def test_delete_relation(self):
diff --git a/arkindex/images/management/commands/check_images.py b/arkindex/images/management/commands/check_images.py
index a924b16a81..9a630e9c4e 100644
--- a/arkindex/images/management/commands/check_images.py
+++ b/arkindex/images/management/commands/check_images.py
@@ -59,7 +59,7 @@ class Command(BaseCommand):
             server_sample = server.images \
                 .filter(status=S3FileStatus.Checked) \
                 .order_by("?")[:sample]
-            self.stdout.write("Re-checking {} images in server {}".format(len(server_sample), server.display_name))
+            self.stdout.write(f"Re-checking {len(server_sample)} images in server {server.display_name}")
             self.check(server_sample)
 
         self.check(images)
@@ -67,7 +67,7 @@ class Command(BaseCommand):
         successful, failed = 0, 0
 
         for image in images:
-            self.stdout.write("Checking image {} at {}".format(str(image.id), image.url))
+            self.stdout.write(f"Checking image {str(image.id)} at {image.url}")
             image.perform_check(save=True)
             if image.status == S3FileStatus.Checked:
                 successful += 1
diff --git a/arkindex/images/models.py b/arkindex/images/models.py
index 8b904acaf9..f2ab13933d 100644
--- a/arkindex/images/models.py
+++ b/arkindex/images/models.py
@@ -156,11 +156,11 @@ class ImageServer(models.Model):
         folder_slash = folder.lstrip("/") + "/"
         images = self.images.filter(path__startswith=folder_slash)
         if not images.exists():
-            raise ValueError('No images are in the "{}" folder'.format(folder))
+            raise ValueError(f'No images are in the "{folder}" folder')
 
         # Create the new server on the subfolder
         new_server = ImageServer.objects.create(
-            display_name="{}_{}".format(self.display_name, slugify(folder)),
+            display_name=f"{self.display_name}_{slugify(folder)}",
             url=urllib.parse.urljoin(self.url.lstrip("/") + "/", folder),
         )
 
@@ -218,8 +218,8 @@ class Image(S3FileMixin, IndexableModel):
         if max_width is None and max_height is None:
             resize = "full"
         else:
-            resize = "{0},{1}".format(max_width or "", max_height or "")
-        return "{0}/full/{1}/0/default.jpg".format(self.url.rstrip("/"), resize)
+            resize = "{},{}".format(max_width or "", max_height or "")
+        return "{}/full/{}/0/default.jpg".format(self.url.rstrip("/"), resize)
 
     @property
     def s3_put_url(self):
@@ -359,7 +359,7 @@ class Thumbnail(S3FileMixin):
 
     @property
     def name(self):
-        return "{}.jpg".format(str(self.element.id.hex))
+        return f"{str(self.element.id.hex)}.jpg"
 
     @cached_property
     def s3_key(self):
diff --git a/arkindex/images/tests/test_image_api.py b/arkindex/images/tests/test_image_api.py
index c66e1052c3..edbeb945c1 100644
--- a/arkindex/images/tests/test_image_api.py
+++ b/arkindex/images/tests/test_image_api.py
@@ -35,7 +35,7 @@ class TestImageApi(FixtureAPITestCase):
     def setUp(self):
         super().setUp()
         # A random 32-character hex string
-        self.image_hash = "{:032x}".format(random.randrange(16**32))
+        self.image_hash = f"{random.randrange(16**32):032x}"
 
     def test_create_image_requires_login(self):
         with self.assertNumQueries(0):
@@ -164,7 +164,7 @@ class TestImageApi(FixtureAPITestCase):
     def test_create_image_unique_path(self):
         self.client.force_login(self.superuser)
         existing_image = self.imgsrv.images.create(path="something", hash=self.image_hash)
-        new_hash = "{:032x}".format(random.randrange(16**32))
+        new_hash = f"{random.randrange(16**32):032x}"
 
         with self.assertNumQueries(4):
             response = self.client.post(reverse("api:image-create"), {"hash": new_hash, "path": "something"})
diff --git a/arkindex/images/views.py b/arkindex/images/views.py
index b685c604d6..e2f05e3b7f 100644
--- a/arkindex/images/views.py
+++ b/arkindex/images/views.py
@@ -43,7 +43,7 @@ class ImageServerMergeView(ImageServerAdminView):
         except ValueError as e:
             self.model_admin.message_user(
                 self.request,
-                "Merging failed: {}".format(str(e)),
+                f"Merging failed: {str(e)}",
                 level=messages.ERROR,
             )
             return super().form_valid(form)
@@ -52,7 +52,7 @@ class ImageServerMergeView(ImageServerAdminView):
 
         self.model_admin.message_user(
             self.request,
-            "Successfully merged {} into {}".format(source, destination),
+            f"Successfully merged {source} into {destination}",
             level=messages.SUCCESS,
         )
         return super().form_valid(form)
@@ -70,7 +70,7 @@ class ImageServerSplitView(ImageServerAdminView):
         except ValueError as e:
             self.model_admin.message_user(
                 self.request,
-                "Splitting failed: {}".format(str(e)),
+                f"Splitting failed: {str(e)}",
                 level=messages.ERROR,
             )
             return super().form_valid(form)
@@ -79,7 +79,7 @@ class ImageServerSplitView(ImageServerAdminView):
 
         self.model_admin.message_user(
             self.request,
-            'Successfully split "{}" from {} into {}'.format(folder, self.server, new_server),
+            f'Successfully split "{folder}" from {self.server} into {new_server}',
             level=messages.SUCCESS,
         )
         return super().form_valid(form)
diff --git a/arkindex/ponos/models.py b/arkindex/ponos/models.py
index 76a91f8a15..1618e3e632 100644
--- a/arkindex/ponos/models.py
+++ b/arkindex/ponos/models.py
@@ -22,7 +22,7 @@ from arkindex.project.validators import MaxValueValidator
 
 
 def generate_seed() -> str:
-    return "{:064x}".format(random.getrandbits(256))
+    return f"{random.getrandbits(256):064x}"
 
 
 def gen_nonce(size=16):
@@ -60,7 +60,7 @@ class Farm(models.Model):
     ]
 
     def __str__(self) -> str:
-        return "Farm {}".format(self.name)
+        return f"Farm {self.name}"
 
     def is_available(self, user) -> bool:
         return True
@@ -250,7 +250,7 @@ class TaskLogs(S3FileMixin):
 
         try:
             log_bytes = self.s3_object.get(
-                Range="bytes=-{}".format(max_length),
+                Range=f"bytes=-{max_length}",
             )["Body"].read()
 
         except ClientError as e:
@@ -376,7 +376,7 @@ class Task(models.Model):
     ]
 
     def __str__(self) -> str:
-        return "Task {}, run {}, depth {}".format(self.slug, self.run, self.depth)
+        return f"Task {self.slug}, run {self.run}, depth {self.depth}"
 
     def get_absolute_url(self) -> str:
         """
diff --git a/arkindex/ponos/serializer_fields.py b/arkindex/ponos/serializer_fields.py
index 2823133392..1bd83378c8 100644
--- a/arkindex/ponos/serializer_fields.py
+++ b/arkindex/ponos/serializer_fields.py
@@ -56,7 +56,7 @@ class Base64Field(serializers.CharField):
         return base64.b64encode(obj)
 
 
-class CurrentProcessDefault(object):
+class CurrentProcessDefault:
     """
     Use the process of the currently authenticated task as a default value.
     If Ponos task authentication is not in use, returns None.
diff --git a/arkindex/ponos/tasks.py b/arkindex/ponos/tasks.py
index b4744c1a01..f568d59eaa 100644
--- a/arkindex/ponos/tasks.py
+++ b/arkindex/ponos/tasks.py
@@ -181,14 +181,12 @@ def run_docker_task(client, task, temp_dir):
 
     # 4. Download extra_files
     if task.extra_files:
-        logger.info("Downloading extra_files for task {!s}".format(task))
+        logger.info(f"Downloading extra_files for task {task!s}")
         try:
             download_extra_files(task, temp_dir)
         except Exception as e:
             logger.warning(
-                "Failed downloading extra_files for task {!s}: {!s}".format(
-                    task, e
-                )
+                f"Failed downloading extra_files for task {task!s}: {e!s}"
             )
             task.state = State.Error
             task.save()
diff --git a/arkindex/ponos/tests/test_models.py b/arkindex/ponos/tests/test_models.py
index 8492b07765..8b8ff878e1 100644
--- a/arkindex/ponos/tests/test_models.py
+++ b/arkindex/ponos/tests/test_models.py
@@ -30,17 +30,17 @@ class TestModels(FixtureAPITestCase):
             self.task1.save()
             if state in FINAL_STATES:
                 self.assertTrue(
-                    self.task1.is_final(), msg="{} should be final".format(state)
+                    self.task1.is_final(), msg=f"{state} should be final"
                 )
                 self.assertTrue(
-                    self.process.is_final, msg="{} should be final".format(state)
+                    self.process.is_final, msg=f"{state} should be final"
                 )
             else:
                 self.assertFalse(
-                    self.task1.is_final(), msg="{} should not be final".format(state)
+                    self.task1.is_final(), msg=f"{state} should not be final"
                 )
                 self.assertFalse(
-                    self.process.is_final, msg="{} should not be final".format(state)
+                    self.process.is_final, msg=f"{state} should not be final"
                 )
 
     def test_requires_gpu(self):
diff --git a/arkindex/process/api.py b/arkindex/process/api.py
index f527305661..09703baa6e 100644
--- a/arkindex/process/api.py
+++ b/arkindex/process/api.py
@@ -189,7 +189,7 @@ class ProcessList(ProcessACLMixin, ListAPIView):
             try:
                 corpus_id = UUID(corpus_id)
             except (AttributeError, ValueError):
-                raise ValidationError({"corpus": ["'{}' is not a valid UUID".format(corpus_id)]})
+                raise ValidationError({"corpus": [f"'{corpus_id}' is not a valid UUID"]})
 
             # No supplementary validation is required on the corpus ID filter
             filters &= Q(corpus=corpus_id)
@@ -270,7 +270,7 @@ class ProcessList(ProcessACLMixin, ListAPIView):
         return qs.order_by("-date_order")
 
 
-class ProcessQuerysetMixin(object):
+class ProcessQuerysetMixin:
     """
     Optimized queryset for Retrieve/Update/PartialUpdate/Destroy/RetryProcess
     """
diff --git a/arkindex/process/builder.py b/arkindex/process/builder.py
index 74619b170e..af735708e2 100644
--- a/arkindex/process/builder.py
+++ b/arkindex/process/builder.py
@@ -16,7 +16,7 @@ from arkindex.images.models import ImageServer
 from arkindex.ponos.models import GPU, Task, task_token_default
 
 
-class ProcessBuilder(object):
+class ProcessBuilder:
 
     def __init__(self, process) -> None:
         self.process = process
diff --git a/arkindex/process/models.py b/arkindex/process/models.py
index 8e8dff5625..1e21d56ef2 100644
--- a/arkindex/process/models.py
+++ b/arkindex/process/models.py
@@ -586,8 +586,8 @@ class Revision(IndexableModel):
         return "{}/commit/{}".format(self.repo.url.rstrip("/"), self.hash)
 
     def __str__(self):
-        message = ' "{}"'.format(self.message.splitlines()[0]) if self.message else ""
-        return "{}{} by {}".format(self.hash[:8], message, self.author)
+        message = f' "{self.message.splitlines()[0]}"' if self.message else ""
+        return f"{self.hash[:8]}{message} by {self.author}"
 
 
 class Worker(models.Model):
diff --git a/arkindex/process/serializers/imports.py b/arkindex/process/serializers/imports.py
index 23e6017546..2b7ae9c8a8 100644
--- a/arkindex/process/serializers/imports.py
+++ b/arkindex/process/serializers/imports.py
@@ -670,7 +670,7 @@ class CorpusProcessSerializer(serializers.Serializer):
         # If element is defined ensure it is part of the right corpus
         if element and element.corpus_id != corpus.id:
             raise ValidationError({
-                "element": ["Element is not part of corpus {}".format(corpus.name)]
+                "element": [f"Element is not part of corpus {corpus.name}"]
             })
 
         # Check type filter is valid
diff --git a/arkindex/process/tests/test_workeractivity_stats.py b/arkindex/process/tests/test_workeractivity_stats.py
index 3e8e292a6e..66e584db32 100644
--- a/arkindex/process/tests/test_workeractivity_stats.py
+++ b/arkindex/process/tests/test_workeractivity_stats.py
@@ -86,7 +86,7 @@ class TestWorkerActivityStats(FixtureAPITestCase):
         with pgtrigger.ignore("process.WorkerActivity:read_only_workeractivity_updated", "process.WorkerActivity:update_workeractivity_updated"):
             WorkerActivity.objects.filter(element__corpus_id=cls.corpus.id).update(created=Now(), updated=Now(), started=Now())
 
-        cls.error, cls.processed, cls.queued, cls.started = [
+        cls.error, cls.processed, cls.queued, cls.started = (
             WorkerActivity.objects.filter(
                 element__corpus_id=cls.corpus.id,
                 worker_version_id=cls.version_1.id,
@@ -98,9 +98,9 @@ class TestWorkerActivityStats(FixtureAPITestCase):
                 WorkerActivityState.Queued,
                 WorkerActivityState.Started
             ]
-        ]
+        )
 
-        cls.error_2, cls.processed_2, cls.queued_2, cls.started_2 = [
+        cls.error_2, cls.processed_2, cls.queued_2, cls.started_2 = (
             WorkerActivity.objects.filter(
                 element__corpus_id=cls.corpus.id,
                 worker_version_id=cls.version_3.id,
@@ -112,7 +112,7 @@ class TestWorkerActivityStats(FixtureAPITestCase):
                 WorkerActivityState.Queued,
                 WorkerActivityState.Started
             ]
-        ]
+        )
 
     def test_corpus_requires_login(self):
         with self.assertNumQueries(0):
diff --git a/arkindex/project/argparse.py b/arkindex/project/argparse.py
index bbb3dcf17c..d308011199 100644
--- a/arkindex/project/argparse.py
+++ b/arkindex/project/argparse.py
@@ -7,7 +7,7 @@ from arkindex.process.models import Process, Repository, WorkerVersion
 from arkindex.users.models import User
 
 
-class ModelArgument(object):
+class ModelArgument:
     model = None
     text_search_field = "name"
     text_search_lookup = "icontains"
@@ -27,9 +27,9 @@ class ModelArgument:
         try:
             return self.text_search(qs, arg)
         except self.model.DoesNotExist:
-            raise CommandError('{} "{}" does not exist'.format(self.model.__name__, arg))
+            raise CommandError(f'{self.model.__name__} "{arg}" does not exist')
         except self.model.MultipleObjectsReturned:
-            raise CommandError('"{}" matches multiple {} instances'.format(arg, self.model.__name__))
+            raise CommandError(f'"{arg}" matches multiple {self.model.__name__} instances')
 
     def text_search(self, qs, arg):
         if not self.text_search_field:
diff --git a/arkindex/project/aws.py b/arkindex/project/aws.py
index 659f37326a..ac98c858d6 100644
--- a/arkindex/project/aws.py
+++ b/arkindex/project/aws.py
@@ -88,7 +88,7 @@ def _retry_delete_predicate(exception):
     )
 
 
-class S3FileMixin(object):
+class S3FileMixin:
 
     def get_s3_object(self):
         if not self.s3_bucket or not self.s3_key:
@@ -142,12 +142,12 @@ class S3FileMixin:
 
     def download(self):
         b = BytesIO()
-        logger.debug("Downloading file {} from S3".format(self.s3_key))
+        logger.debug(f"Downloading file {self.s3_key} from S3")
        self.s3_object.download_fileobj(b)
         return b
 
     def download_to(self, path):
-        logger.debug("Downloading file {} from S3".format(self.s3_key))
+        logger.debug(f"Downloading file {self.s3_key} from S3")
         self.s3_object.download_file(path)
 
     def check_hash(self, save=True, raise_exc=False):
@@ -162,10 +162,7 @@ class S3FileMixin:
         elif "-" in self.s3_object.e_tag:
             # Multipart hash: a hash of each part's hash,
             # combined with the number of parts, separated by a dash
-            logger.warning("Could not check remote multipart hash {!r} against local hash {!r}".format(
-                self.s3_object.e_tag,
-                self.hash,
-            ))
+            logger.warning(f"Could not check remote multipart hash {self.s3_object.e_tag!r} against local hash {self.hash!r}")
             self.status = S3FileStatus.Unchecked
         else:
             self.status = S3FileStatus.Error
diff --git a/arkindex/project/fields.py b/arkindex/project/fields.py
index e66f28bb13..4af2a16a3d 100644
--- a/arkindex/project/fields.py
+++ b/arkindex/project/fields.py
@@ -120,14 +120,14 @@ class LastItemTransform(Transform):
 
     def as_sql(self, compiler, connection):
         lhs, params = compiler.compile(self.lhs)
-        return "%s[array_length(%s, 1)]" % (lhs, lhs), params
+        return f"{lhs}[array_length({lhs}, 1)]", params
 
     @property
     def output_field(self):
         return self.base_field
 
 
-class LastItemTransformFactory(object):
+class LastItemTransformFactory:
     """
     Create a LastItemTransform with a given base field
     """
diff --git a/arkindex/project/mixins.py b/arkindex/project/mixins.py
index 6f04942aa2..744da257c3 100644
--- a/arkindex/project/mixins.py
+++ b/arkindex/project/mixins.py
@@ -13,7 +13,7 @@ from arkindex.users.models import Role
 from arkindex.users.utils import filter_rights, get_max_level, has_access
 
 
-class ACLMixin(object):
+class ACLMixin:
     """
     Access control mixin using the generic Right table.
     """
@@ -140,7 +140,7 @@ class ProcessACLMixin(ACLMixin):
         return get_max_level(self.user, process.corpus)
 
 
-class SelectionMixin(object):
+class SelectionMixin:
 
     def get_selection(self, corpus_id=None):
         assert settings.ARKINDEX_FEATURES["selection"], "Selection feature is unavailable"
@@ -165,7 +165,7 @@ class DeprecatedAPIException(APIException):
     default_code = "deprecated"
 
 
-class DeprecatedMixin(object):
+class DeprecatedMixin:
     # Add this mixin to an APIView to make it deprecated.
     serializer_class = DeprecatedExceptionSerializer
 
@@ -196,7 +196,7 @@ class DeprecatedMixin:
         raise DeprecatedAPIException(detail=getattr(self, "deprecation_message", None))
 
 
-class CachedViewMixin(object):
+class CachedViewMixin:
     """
     Add this mixin to any class-based view to cache it.
     """
diff --git a/arkindex/project/permissions.py b/arkindex/project/permissions.py
index de38572783..ee36346dcc 100644
--- a/arkindex/project/permissions.py
+++ b/arkindex/project/permissions.py
@@ -17,7 +17,7 @@ def require_verified_email(request, view):
 
 
 def _get_scopes(request, view):
-    specific_scopes_attr = "{}_scopes".format(request.method.lower())
+    specific_scopes_attr = f"{request.method.lower()}_scopes"
     scopes = list(getattr(view, "scopes", []))
     scopes.extend(getattr(view, specific_scopes_attr, []))
     return set(scopes)
diff --git a/arkindex/project/serializer_fields.py b/arkindex/project/serializer_fields.py
index 06245b86fc..9a16ad4402 100644
--- a/arkindex/project/serializer_fields.py
+++ b/arkindex/project/serializer_fields.py
@@ -35,7 +35,7 @@ class EnumField(serializers.ChoiceField):
         try:
             return self.enum(data)
         except ValueError:
-            raise serializers.ValidationError("Value is not of type {}".format(self.enum.__name__))
+            raise serializers.ValidationError(f"Value is not of type {self.enum.__name__}")
 
 
 class PointField(serializers.ListField):
diff --git a/arkindex/project/settings.py b/arkindex/project/settings.py
index dbd5a3b43d..1a3cee8d36 100644
--- a/arkindex/project/settings.py
+++ b/arkindex/project/settings.py
@@ -414,7 +414,7 @@ LOGGING = {
 }
 
 # Email
-EMAIL_SUBJECT_PREFIX = "[Arkindex {}] ".format(ARKINDEX_ENV)
+EMAIL_SUBJECT_PREFIX = f"[Arkindex {ARKINDEX_ENV}] "
 if conf["email"]:
     ADMINS = [("", address) for address in conf["email"]["error_report_recipients"]]
     EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
@@ -460,7 +460,7 @@ CORS_URLS_REGEX = r"^/(api|ponos)/.*$"
 # Support CORS suffixes
 if conf["cors"]["suffixes"]:
     CORS_ALLOWED_ORIGIN_REGEXES = [
-        r"^https://.+{}".format(suffix)
+        rf"^https://.+{suffix}"
         for suffix in conf["cors"]["suffixes"]
     ]
diff --git a/arkindex/project/tests/__init__.py b/arkindex/project/tests/__init__.py
index cca5e05eb7..ecceb69a5e 100644
--- a/arkindex/project/tests/__init__.py
+++ b/arkindex/project/tests/__init__.py
@@ -67,7 +67,7 @@ class _AssertExactQueriesContext(CaptureQueriesContext):
         if not self.path.exists():
             try:
                 self.path.write_text(actual_sql)
-            except IOError as e:
+            except OSError as e:
                 raise AssertionError(
                     f"Could not assert on SQL queries; the file at {self.path} did not exist. "
                     "A new file could not be created with the current SQL queries."
@@ -96,7 +96,7 @@ class _AssertExactQueriesContext(CaptureQueriesContext):
         self.test_case.assertEqual(expected_sql, actual_sql)
 
 
-class FixtureMixin(object):
+class FixtureMixin:
     """
     Add the database fixture to a test case
     """
diff --git a/arkindex/project/validators.py b/arkindex/project/validators.py
index 15fb732317..980e263686 100644
--- a/arkindex/project/validators.py
+++ b/arkindex/project/validators.py
@@ -2,7 +2,7 @@ from django.core import validators
 from rest_framework import serializers
 
 
-class XorValidator(object):
+class XorValidator:
     """
     A generic validator for when two fields can't be used simultaneously
     """
@@ -45,7 +45,7 @@ class ConditionalUniqueValidator(serializers.UniqueTogetherValidator):
         return super().__call__(attrs, serializer)
 
 
-class ForbiddenValidator(object):
+class ForbiddenValidator:
     """
     A validator that will show an error message
     any time a value is set.
@@ -65,7 +65,7 @@ class ForbiddenValidator(object):
         raise serializers.ValidationError(self.message)
 
 
-class HiddenCallableValidatorMixin(object):
+class HiddenCallableValidatorMixin:
     """
     Implements a workaround for some issues with error messages in DRF
     and with drf-spectacular OpenAPI schema generation when the `limit_value`
diff --git a/arkindex/training/api.py b/arkindex/training/api.py
index c7d7220ae9..77289fab01 100644
--- a/arkindex/training/api.py
+++ b/arkindex/training/api.py
@@ -737,7 +737,7 @@ class DatasetElementCursorPagination(CountCursorPagination):
     ordering = ("element_id", "id")
 
 
-class DatasetSetBase():
+class DatasetSetBase:
     permission_classes = (IsVerified, )
     serializer_class = DatasetSetSerializer
 
diff --git a/arkindex/training/management/commands/migrate_workers.py b/arkindex/training/management/commands/migrate_workers.py
index 6e011fe5a6..e499d74e2f 100644
--- a/arkindex/training/management/commands/migrate_workers.py
+++ b/arkindex/training/management/commands/migrate_workers.py
@@ -46,7 +46,7 @@ def choose(instances, name_field="name", title="Pick one item", allow_skip=False
     choices["0"] = choices["skip"] = None
     for i, (id, name) in enumerate(items, 1):
         print(f"{i}: {id} {name}")
-        choices[str((i))] = choices[str(id)] = choices[name] = id
+        choices[str(i)] = choices[str(id)] = choices[name] = id
 
     # Get the first valid choice
     while True:
diff --git a/arkindex/training/models.py b/arkindex/training/models.py
index ea84bd5c8a..028ed4071c 100644
--- a/arkindex/training/models.py
+++ b/arkindex/training/models.py
@@ -169,10 +169,7 @@ class ModelVersion(S3FileMixin, IndexableModel):
         elif "-" in self.s3_object.e_tag:
             # Multipart hash: a hash of each part's hash,
             # combined with the number of parts, separated by a dash
-            logger.warning("Could not check remote multipart hash {!r} against local hash {!r}".format(
-                self.s3_object.e_tag,
-                self.archive_hash,
-            ))
+            logger.warning(f"Could not check remote multipart hash {self.s3_object.e_tag!r} against local hash {self.archive_hash!r}")
             self.state = ModelVersionState.Available
         else:
             self.state = ModelVersionState.Error
diff --git a/arkindex/training/tests/test_datasets_api.py b/arkindex/training/tests/test_datasets_api.py
index ce1d83ce0a..b824eae2ee 100644
--- a/arkindex/training/tests/test_datasets_api.py
+++ b/arkindex/training/tests/test_datasets_api.py
@@ -657,7 +657,7 @@ class TestDatasetsAPI(FixtureAPITestCase):
 
     def test_update_ponos_task_state_forbidden(self):
         """Dataset's state update is limited to specific transitions"""
-        op, build, complete, error = [DatasetState[state] for state in ("Open", "Building", "Complete", "Error")]
+        op, build, complete, error = (DatasetState[state] for state in ("Open", "Building", "Complete", "Error"))
         states = {
             (op, op): True,
             (op, build) : True,
@@ -930,7 +930,7 @@ class TestDatasetsAPI(FixtureAPITestCase):
 
     def test_partial_update_ponos_task_state_forbidden(self):
         """Dataset's state update is limited to specific transitions"""
-        op, build, complete, error = [DatasetState[state] for state in ("Open", "Building", "Complete", "Error")]
+        op, build, complete, error = (DatasetState[state] for state in ("Open", "Building", "Complete", "Error"))
         states = {
             (op, op): True,
             (op, build) : True,
diff --git a/arkindex/users/api.py b/arkindex/users/api.py
index 467fd2e728..5bc55e012c 100644
--- a/arkindex/users/api.py
+++ b/arkindex/users/api.py
@@ -121,7 +121,7 @@ class UserCreate(CreateAPIView):
             fail_silently=True,
         )
         if sent == 0:
-            logger.error("Failed to send registration email to {}".format(user.email))
+            logger.error(f"Failed to send registration email to {user.email}")
 
         return Response(UserSerializer(user).data, status=status.HTTP_201_CREATED)
 
diff --git a/arkindex/users/tests/test_jobs.py b/arkindex/users/tests/test_jobs.py
index 9e4da6d5ca..8dd4fb11a7 100644
--- a/arkindex/users/tests/test_jobs.py
+++ b/arkindex/users/tests/test_jobs.py
@@ -10,7 +10,7 @@ from rq.job import JobStatus
 from arkindex.project.tests import FixtureAPITestCase
 
 
-class MockedJob(object):
+class MockedJob:
 
     def __init__(self, id=None, user_id=None, status=JobStatus.QUEUED, **kwargs):
         self.id = id or str(uuid4())
@@ -32,7 +32,7 @@ class MockedJob:
 
     @classmethod
     def key_for(cls, id):
-        return f"rq:job:{id}".encode("utf-8")
+        return f"rq:job:{id}".encode()
 
 
 class TestJobs(FixtureAPITestCase):
diff --git a/ruff.toml b/ruff.toml
index 943a2b2ee4..8a02c65695 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -25,6 +25,8 @@ select = [
     "RET",
     # eradicate
     "ERA",
+    # pyupgrade
+    "UP",
 ]
 
 ignore = ["E501", "RET502", "RET503"]
diff --git a/setup.py b/setup.py
index 39f437cdac..fdd40bb80d 100755
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ def _parse_requirement(line):
 
 
 def requirements(path):
-    assert os.path.exists(path), "Missing requirements {}".format(path)
+    assert os.path.exists(path), f"Missing requirements {path}"
     with open(path) as f:
         return list(map(_parse_requirement, f.read().splitlines()))
 
-- 
GitLab