diff --git a/arkindex_worker/cache.py b/arkindex_worker/cache.py
index c757a5ef2dfa13723ffa2cdc558ab2c27a4a470b..76e80f1f2148c397fd350699b8f052a4c56d4b7e 100644
--- a/arkindex_worker/cache.py
+++ b/arkindex_worker/cache.py
@@ -374,3 +374,17 @@ def merge_parents_cache(paths: list, current_database: Path):
     for statement in statements:
         cursor.execute(statement)
     connection.commit()
+
+
+def unsupported_cache(func):
+    def wrapper(self, *args, **kwargs):
+        results = func(self, *args, **kwargs)
+
+        if not (self.is_read_only or self.use_cache):
+            logger.warning(
+                f"This API helper `{func.__name__}` did not update the cache database"
+            )
+
+        return results
+
+    return wrapper
diff --git a/arkindex_worker/worker/dataset.py b/arkindex_worker/worker/dataset.py
index be088ef422d33aeab9a26d26a7cee1fd1461fa21..86c36708aa34bd1344820490a72da0c3097bfda2 100644
--- a/arkindex_worker/worker/dataset.py
+++ b/arkindex_worker/worker/dataset.py
@@ -6,6 +6,7 @@ from collections.abc import Iterator
 from enum import Enum
 
 from arkindex_worker import logger
+from arkindex_worker.cache import unsupported_cache
 from arkindex_worker.models import Dataset, Element
 
 
@@ -68,6 +69,7 @@ class DatasetMixin:
 
         return map(format_result, list(results))
 
+    @unsupported_cache
     def update_dataset_state(self, dataset: Dataset, state: DatasetState) -> Dataset:
         """
         Partially updates a dataset state through the API.
diff --git a/arkindex_worker/worker/element.py b/arkindex_worker/worker/element.py
index 0ab3724208825973e48dcc2b34f1c6e92076ccda..22b8b9c61785ff7a391854f3b62ed26363b431f8 100644
--- a/arkindex_worker/worker/element.py
+++ b/arkindex_worker/worker/element.py
@@ -9,7 +9,7 @@ from warnings import warn
 from peewee import IntegrityError
 
 from arkindex_worker import logger
-from arkindex_worker.cache import CachedElement, CachedImage
+from arkindex_worker.cache import CachedElement, CachedImage, unsupported_cache
 from arkindex_worker.models import Element
 
 
@@ -30,6 +30,7 @@ class MissingTypeError(Exception):
 
 
 class ElementMixin:
+    @unsupported_cache
     def create_required_types(self, element_types: list[ElementType]):
         """Creates given element types in the corpus.
 
@@ -82,6 +83,7 @@ class ElementMixin:
 
         return True
 
+    @unsupported_cache
     def create_sub_element(
         self,
         element: Element,
@@ -284,6 +286,7 @@ class ElementMixin:
 
         return created_ids
 
+    @unsupported_cache
     def create_element_parent(
         self,
         parent: Element,
diff --git a/arkindex_worker/worker/entity.py b/arkindex_worker/worker/entity.py
index 07fd8a6de67e9eb173a2928dcfd7a32e68641e01..ce7e004b795fb554e1b3cd170a9faf266b83523b 100644
--- a/arkindex_worker/worker/entity.py
+++ b/arkindex_worker/worker/entity.py
@@ -9,7 +9,11 @@ from warnings import warn
 from peewee import IntegrityError
 
 from arkindex_worker import logger
-from arkindex_worker.cache import CachedEntity, CachedTranscriptionEntity
+from arkindex_worker.cache import (
+    CachedEntity,
+    CachedTranscriptionEntity,
+    unsupported_cache,
+)
 from arkindex_worker.models import Element, Transcription
 
 
@@ -29,6 +33,7 @@ class MissingEntityType(Exception):
 
 
 class EntityMixin:
+    @unsupported_cache
     def check_required_entity_types(
         self, entity_types: list[str], create_missing: bool = True
     ):
@@ -206,6 +211,7 @@ class EntityMixin:
         )
         return transcription_ent
 
+    @unsupported_cache
     def create_transcription_entities(
         self,
         transcription: Transcription,
diff --git a/arkindex_worker/worker/metadata.py b/arkindex_worker/worker/metadata.py
index 75e4d6b751f663aa7a8bcc14d75a60fd2d5fadb7..2a822bbf0f1c0331ad2657103489df997c74ebff 100644
--- a/arkindex_worker/worker/metadata.py
+++ b/arkindex_worker/worker/metadata.py
@@ -5,7 +5,7 @@ ElementsWorker methods for metadata.
 from enum import Enum
 
 from arkindex_worker import logger
-from arkindex_worker.cache import CachedElement
+from arkindex_worker.cache import CachedElement, unsupported_cache
 from arkindex_worker.models import Element
 
 
@@ -56,6 +56,7 @@ class MetaType(Enum):
 
 
 class MetaDataMixin:
+    @unsupported_cache
     def create_metadata(
         self,
         element: Element | CachedElement,
@@ -106,6 +107,7 @@ class MetaDataMixin:
 
         return metadata["id"]
 
+    @unsupported_cache
     def create_metadatas(
         self,
         element: Element | CachedElement,
diff --git a/tests/test_dataset_worker.py b/tests/test_dataset_worker.py
index ac303e89e17d210b184b658b39ba53ea04706354..c8bb9ebe02d8894fe1a94e70706b6c05d6806a8d 100644
--- a/tests/test_dataset_worker.py
+++ b/tests/test_dataset_worker.py
@@ -470,10 +470,17 @@ def test_run_initial_dataset_state_error(
             logging.WARNING,
             f"Failed running worker on dataset dataset_id: AssertionError('{error}')",
         ),
-        (
-            logging.ERROR,
-            "Ran on 1 datasets: 0 completed, 1 failed",
-        ),
+    ] + (
+        [
+            (
+                logging.WARNING,
+                "This API helper `update_dataset_state` did not update the cache database",
+            )
+        ]
+        if generator
+        else []
+    ) + [
+        (logging.ERROR, "Ran on 1 datasets: 0 completed, 1 failed"),
     ]
 
 
@@ -697,7 +704,15 @@ def test_run(
         ] * 2
         extra_logs += [
             (logging.INFO, "Building Dataset (dataset_id) (1/1)"),
+            (
+                logging.WARNING,
+                "This API helper `update_dataset_state` did not update the cache database",
+            ),
             (logging.INFO, "Completed Dataset (dataset_id) (1/1)"),
+            (
+                logging.WARNING,
+                "This API helper `update_dataset_state` did not update the cache database",
+            ),
         ]
     else:
         archive_path = (
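
Below is a minimal usage sketch (not part of the patch) of how the new `unsupported_cache` decorator behaves once applied to an API helper. `DemoWorker`, its `is_read_only`/`use_cache` attributes and the `update_dataset_state` stub are hypothetical stand-ins for the real worker mixins; only the decorator body is copied from arkindex_worker/cache.py above.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)


def unsupported_cache(func):
    def wrapper(self, *args, **kwargs):
        results = func(self, *args, **kwargs)

        # Warn when the worker is neither read-only nor running with a cache:
        # the helper called the API but left the local cache database untouched.
        if not (self.is_read_only or self.use_cache):
            logger.warning(
                f"This API helper `{func.__name__}` did not update the cache database"
            )

        return results

    return wrapper


class DemoWorker:
    is_read_only = False
    use_cache = False

    @unsupported_cache
    def update_dataset_state(self, dataset_id, state):
        # Stand-in for the real API call.
        return {"id": dataset_id, "state": state}


DemoWorker().update_dataset_state("dataset_id", "building")
# Logs: This API helper `update_dataset_state` did not update the cache database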