diff --git a/arkindex_worker/worker.py b/arkindex_worker/worker.py
index d25badd54c38168c48083ac235f8f25a819b3840..ad80249540299214e5f26d9ea1c6c6570a7ae95c 100644
--- a/arkindex_worker/worker.py
+++ b/arkindex_worker/worker.py
@@ -536,20 +536,25 @@ class ElementsWorker(BaseWorker):
 
         self.report.add_transcription(element.id)
 
-        # Store transcription in local cache
-        try:
-            to_insert = [
-                CachedTranscription(
-                    id=convert_str_uuid_to_hex(created["id"]),
-                    element_id=convert_str_uuid_to_hex(element.id),
-                    text=created["text"],
-                    confidence=created["confidence"],
-                    worker_version_id=convert_str_uuid_to_hex(self.worker_version_id),
+        if self.use_cache:
+            # Store transcription in local cache
+            try:
+                to_insert = [
+                    CachedTranscription(
+                        id=convert_str_uuid_to_hex(created["id"]),
+                        element_id=convert_str_uuid_to_hex(element.id),
+                        text=created["text"],
+                        confidence=created["confidence"],
+                        worker_version_id=convert_str_uuid_to_hex(
+                            self.worker_version_id
+                        ),
+                    )
+                ]
+                self.cache.insert("transcriptions", to_insert)
+            except sqlite3.IntegrityError as e:
+                logger.warning(
+                    f"Couldn't save created transcription in local cache: {e}"
                 )
-            ]
-            self.cache.insert("transcriptions", to_insert)
-        except sqlite3.IntegrityError as e:
-            logger.warning(f"Couldn't save created transcription in local cache: {e}")
 
     def create_classification(
         self, element, ml_class, confidence, high_confidence=False
@@ -720,21 +725,25 @@ class ElementsWorker(BaseWorker):
             },
         )
 
-        created_ids = []
-        elements_to_insert = []
-        transcriptions_to_insert = []
-        parent_id_hex = convert_str_uuid_to_hex(element.id)
-        worker_version_id_hex = convert_str_uuid_to_hex(self.worker_version_id)
-        for index, annotation in enumerate(annotations):
-            transcription = transcriptions[index]
-            element_id_hex = convert_str_uuid_to_hex(annotation["id"])
+        for annotation in annotations:
             if annotation["created"]:
                 logger.debug(
                     f"A sub_element of {element.id} with type {sub_element_type} was created during transcriptions bulk creation"
                 )
                 self.report.add_element(element.id, sub_element_type)
+            self.report.add_transcription(annotation["id"])
 
-                if annotation["id"] not in created_ids:
+        if self.use_cache:
+            # Store transcriptions and their associated element (if created) in local cache
+            created_ids = []
+            elements_to_insert = []
+            transcriptions_to_insert = []
+            parent_id_hex = convert_str_uuid_to_hex(element.id)
+            worker_version_id_hex = convert_str_uuid_to_hex(self.worker_version_id)
+            for index, annotation in enumerate(annotations):
+                transcription = transcriptions[index]
+                element_id_hex = convert_str_uuid_to_hex(annotation["id"])
+                if annotation["created"] and annotation["id"] not in created_ids:
                     # TODO: Retrieve real element_name through API
                     elements_to_insert.append(
                         CachedElement(
@@ -748,25 +757,24 @@
                     )
                     created_ids.append(annotation["id"])
 
-            self.report.add_transcription(annotation["id"])
-
-            transcriptions_to_insert.append(
-                CachedTranscription(
-                    # TODO: Retrieve real transcription_id through API
-                    id=convert_str_uuid_to_hex(uuid.uuid4()),
-                    element_id=element_id_hex,
-                    text=transcription["text"],
-                    confidence=transcription["score"],
-                    worker_version_id=worker_version_id_hex,
+                transcriptions_to_insert.append(
+                    CachedTranscription(
+                        # TODO: Retrieve real transcription_id through API
+                        id=convert_str_uuid_to_hex(uuid.uuid4()),
+                        element_id=element_id_hex,
+                        text=transcription["text"],
+                        confidence=transcription["score"],
+                        worker_version_id=worker_version_id_hex,
+                    )
                 )
-            )
 
-        # Store transcriptions and their associated element (if created) in local cache
-        try:
-            self.cache.insert("elements", elements_to_insert)
-            self.cache.insert("transcriptions", transcriptions_to_insert)
-        except sqlite3.IntegrityError as e:
-            logger.warning(f"Couldn't save created transcriptions in local cache: {e}")
+            try:
+                self.cache.insert("elements", elements_to_insert)
+                self.cache.insert("transcriptions", transcriptions_to_insert)
+            except sqlite3.IntegrityError as e:
+                logger.warning(
+                    f"Couldn't save created transcriptions in local cache: {e}"
+                )
 
         return annotations
 
diff --git a/tests/data/cache/lines.sqlite b/tests/data/cache/lines.sqlite
index 4294b9b0f3789629fb466debb0d8931f589f9c59..e7024d74adf29e30a46addfcc0b4c55a3203a3a1 100644
Binary files a/tests/data/cache/lines.sqlite and b/tests/data/cache/lines.sqlite differ
diff --git a/tests/data/cache/tables.sqlite b/tests/data/cache/tables.sqlite
index be29e25631f18d386ad8c644ead09e91792a3d43..b4347205395dd22dba0ce3446f3260d2cd7cb0bb 100644
Binary files a/tests/data/cache/tables.sqlite and b/tests/data/cache/tables.sqlite differ
diff --git a/tests/test_cache.py b/tests/test_cache.py
index fde2c681189a99fdce2476f38533fb91ebcb7285..9564655f71f8465525b7996ed9048649e11e35d8 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -6,7 +6,7 @@ from pathlib import Path
 
 import pytest
 
-from arkindex_worker.cache import CachedElement, LocalDB
+from arkindex_worker.cache import CachedElement, CachedTranscription, LocalDB
 from arkindex_worker.utils import convert_str_uuid_to_hex
 
 FIXTURES = Path(__file__).absolute().parent / "data/cache"
@@ -32,6 +32,26 @@ ELEMENTS_TO_INSERT = [
         ),
     ),
 ]
+TRANSCRIPTIONS_TO_INSERT = [
+    CachedTranscription(
+        id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
+        element_id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
+        text="Hello!",
+        confidence=0.42,
+        worker_version_id=convert_str_uuid_to_hex(
+            "56785678-5678-5678-5678-567856785678"
+        ),
+    ),
+    CachedTranscription(
+        id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
+        element_id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
+        text="How are you?",
+        confidence=0.42,
+        worker_version_id=convert_str_uuid_to_hex(
+            "56785678-5678-5678-5678-567856785678"
+        ),
+    ),
+]
 
 
 def test_init_non_existent_path():
@@ -110,6 +130,10 @@ def test_insert_existing_lines():
         cache.insert("elements", ELEMENTS_TO_INSERT)
     assert str(e.value) == "UNIQUE constraint failed: elements.id"
 
+    with pytest.raises(sqlite3.IntegrityError) as e:
+        cache.insert("transcriptions", TRANSCRIPTIONS_TO_INSERT)
+    assert str(e.value) == "UNIQUE constraint failed: transcriptions.id"
+
     with open(db_path, "rb") as after_file:
         after = after_file.read()
 
@@ -130,3 +154,16 @@ def test_insert():
     )
 
     assert [CachedElement(**dict(row)) for row in generated_rows] == ELEMENTS_TO_INSERT
+
+    cache.insert("transcriptions", TRANSCRIPTIONS_TO_INSERT)
+    generated_rows = cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+
+    expected_cache = LocalDB(f"{FIXTURES}/lines.sqlite")
+    assert (
+        generated_rows
+        == expected_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+
+    assert [
+        CachedTranscription(**dict(row)) for row in generated_rows
+    ] == TRANSCRIPTIONS_TO_INSERT
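
Note: below is a minimal standalone sketch of the cache-write pattern that the new "if self.use_cache:" blocks guard, assuming the worker's self.cache is the LocalDB helper exercised in tests/test_cache.py and that the target SQLite file already has the transcriptions table from the tests/data/cache fixtures. The cache path is illustrative only, and the row reuses the first entry of TRANSCRIPTIONS_TO_INSERT so the duplicate-id warning path is the one exercised.

import logging
import sqlite3

from arkindex_worker.cache import CachedTranscription, LocalDB
from arkindex_worker.utils import convert_str_uuid_to_hex

logger = logging.getLogger(__name__)

# Illustrative path: any SQLite file with the fixtures' schema works here.
cache = LocalDB("tests/data/cache/lines.sqlite")

to_insert = [
    CachedTranscription(
        id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
        element_id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
        text="Hello!",
        confidence=0.42,
        worker_version_id=convert_str_uuid_to_hex(
            "56785678-5678-5678-5678-567856785678"
        ),
    )
]

try:
    cache.insert("transcriptions", to_insert)
except sqlite3.IntegrityError as e:
    # As in the worker code above, a duplicate id is only logged, never re-raised.
    logger.warning(f"Couldn't save created transcription in local cache: {e}")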