# -*- coding: utf-8 -*-
import json
import os
import sqlite3
from pathlib import Path

import pytest

from arkindex_worker.cache import CachedElement, LocalDB
from arkindex_worker.utils import convert_str_uuid_to_hex

FIXTURES = Path(__file__).absolute().parent / "data/cache"

ELEMENTS_TO_INSERT = [
    CachedElement(
        id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
        parent_id=convert_str_uuid_to_hex("12341234-1234-1234-1234-123412341234"),
        type="something",
        polygon=json.dumps([[1, 1], [2, 2], [2, 1], [1, 2]]),
        worker_version_id=convert_str_uuid_to_hex(
            "56785678-5678-5678-5678-567856785678"
        ),
    ),
    CachedElement(
        id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
        parent_id=convert_str_uuid_to_hex("12341234-1234-1234-1234-123412341234"),
        type="something",
        polygon=json.dumps([[1, 1], [2, 2], [2, 1], [1, 2]]),
        worker_version_id=convert_str_uuid_to_hex(
            "56785678-5678-5678-5678-567856785678"
        ),
    ),
]


def test_init_non_existent_path():
    with pytest.raises(sqlite3.OperationalError) as e:
        LocalDB("path/not/found.sqlite")
    assert str(e.value) == "unable to open database file"


def test_init():
    db_path = f"{FIXTURES}/db.sqlite"
    LocalDB(db_path)
    assert os.path.isfile(db_path)


def test_create_tables_existing_table():
    db_path = f"{FIXTURES}/tables.sqlite"
    cache = LocalDB(db_path)

    with open(db_path, "rb") as before_file:
        before = before_file.read()

    cache.create_tables()

    with open(db_path, "rb") as after_file:
        after = after_file.read()

    assert before == after, "Cache was modified"


def test_create_tables():
    db_path = f"{FIXTURES}/db.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()

    expected_cache = LocalDB(f"{FIXTURES}/tables.sqlite")

    # For each table in our new generated cache, we are checking that its structure
    # is the same as the one saved in data/tables.sqlite
    for table in cache.cursor.execute(
        "SELECT name FROM sqlite_master WHERE type = 'table'"
    ):
        name = table["name"]
        expected_table = expected_cache.cursor.execute(
            f"SELECT sql FROM sqlite_master WHERE name = '{name}'"
        ).fetchone()["sql"]
        generated_table = cache.cursor.execute(
            f"SELECT sql FROM sqlite_master WHERE name = '{name}'"
        ).fetchone()["sql"]
        assert expected_table == generated_table


def test_insert_empty_lines():
    db_path = f"{FIXTURES}/db.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()
    cache.insert("elements", [])

    expected_cache = LocalDB(f"{FIXTURES}/tables.sqlite")
    assert (
        cache.cursor.execute("SELECT * FROM elements").fetchall()
        == expected_cache.cursor.execute("SELECT * FROM elements").fetchall()
    )


def test_insert_existing_lines():
    db_path = f"{FIXTURES}/lines.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()

    with open(db_path, "rb") as before_file:
        before = before_file.read()

    with pytest.raises(sqlite3.IntegrityError) as e:
        cache.insert("elements", ELEMENTS_TO_INSERT)
    assert str(e.value) == "UNIQUE constraint failed: elements.id"

    with open(db_path, "rb") as after_file:
        after = after_file.read()

    assert before == after, "Cache was modified"


def test_insert():
    db_path = f"{FIXTURES}/db.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()
    cache.insert("elements", ELEMENTS_TO_INSERT)

    generated_rows = cache.cursor.execute("SELECT * FROM elements").fetchall()

    expected_cache = LocalDB(f"{FIXTURES}/lines.sqlite")
    assert (
        generated_rows
        == expected_cache.cursor.execute("SELECT * FROM elements").fetchall()
    )

    assert [CachedElement(**dict(row)) for row in generated_rows] == ELEMENTS_TO_INSERT


def test_fetch_all():
    db_path = f"{FIXTURES}/lines.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()
    children = cache.fetch("elements")
    assert children == ELEMENTS_TO_INSERT


def test_fetch_with_where():
    db_path = f"{FIXTURES}/lines.sqlite"
    cache = LocalDB(db_path)
    cache.create_tables()
    children = cache.fetch(
        "elements",
        where=[
            (
                "parent_id",
                "=",
                convert_str_uuid_to_hex("12341234-1234-1234-1234-123412341234"),
            ),
            ("id", "LIKE", "%1111%"),
        ],
    )
    assert children == [ELEMENTS_TO_INSERT[0]]