Commit 5aa47513 authored by Eva Bardou

Move use_cache implem + Add initial boolean on elements table

parent 8a3c8ad5
1 merge request: !67 Store created elements in a local SQLite database
Pipeline #78293 passed
@@ -10,12 +10,15 @@ SQL_ELEMENTS_TABLE_CREATION = """CREATE TABLE IF NOT EXISTS elements (
     name TEXT NOT NULL,
     type TEXT NOT NULL,
     polygon TEXT,
+    initial BOOLEAN DEFAULT 0 NOT NULL,
     worker_version_id VARCHAR(32)
 )"""
 
 CachedElement = namedtuple(
-    "CachedElement", ["id", "parent_id", "name", "type", "polygon", "worker_version_id"]
+    "CachedElement",
+    ["id", "parent_id", "name", "type", "polygon", "worker_version_id", "initial"],
+    defaults=[0],
 )
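Side note on the namedtuple change above: defaults apply to the right-most fields, so with defaults=[0] only the new initial field gets a default and it falls back to 0 whenever a caller omits it. A minimal standalone sketch (placeholder values, not part of the diff):

from collections import namedtuple

# Same shape as the CachedElement definition above: the single default value
# is matched against the last field, initial.
CachedElement = namedtuple(
    "CachedElement",
    ["id", "parent_id", "name", "type", "polygon", "worker_version_id", "initial"],
    defaults=[0],
)

# Placeholder values just to show the default kicking in when initial is omitted.
element = CachedElement(
    id=b"00", parent_id=b"00", name="page", type="page",
    polygon="[]", worker_version_id=b"00",
)
assert element.initial == 0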
@@ -27,7 +27,7 @@ CACHE_DIR = f"/data/{os.environ.get('TASK_ID')}"
 
 
 class BaseWorker(object):
-    def __init__(self, description="Arkindex Base Worker"):
+    def __init__(self, description="Arkindex Base Worker", use_cache=False):
         self.parser = argparse.ArgumentParser(description=description)
 
         # Setup workdir either in Ponos environment or on host's home
@@ -50,13 +50,16 @@ class BaseWorker(object):
         logger.info(f"Worker will use {self.work_dir} as working directory")
 
-        if os.path.isdir(CACHE_DIR):
-            cache_path = os.path.join(CACHE_DIR, "db.sqlite")
-        else:
-            cache_path = os.path.join(os.getcwd(), "db.sqlite")
-        self.cache = LocalDB(cache_path)
-        self.cache.create_tables()
+        self.use_cache = use_cache
+
+        if self.use_cache:
+            if os.environ.get("TASK_ID") and os.path.isdir(CACHE_DIR):
+                cache_path = os.path.join(CACHE_DIR, "db.sqlite")
+            else:
+                cache_path = os.path.join(os.getcwd(), "db.sqlite")
+
+            self.cache = LocalDB(cache_path)
+            self.cache.create_tables()
 
     @property
     def is_read_only(self):
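For reference, the cache path selection above prefers the Ponos task directory and only falls back to the current working directory outside a task. A standalone sketch of that check, mirroring the new code (run outside Ponos, TASK_ID is unset and the fallback applies):

import os

# Mirrors the selection in BaseWorker.__init__: use /data/<TASK_ID>/db.sqlite
# inside a Ponos task, otherwise put db.sqlite in the current working directory.
CACHE_DIR = f"/data/{os.environ.get('TASK_ID')}"

if os.environ.get("TASK_ID") and os.path.isdir(CACHE_DIR):
    cache_path = os.path.join(CACHE_DIR, "db.sqlite")
else:
    cache_path = os.path.join(os.getcwd(), "db.sqlite")

print(cache_path)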
@@ -214,8 +217,8 @@ class ActivityState(Enum):
 
 
 class ElementsWorker(BaseWorker):
-    def __init__(self, description="Arkindex Elements Worker"):
-        super().__init__(description)
+    def __init__(self, description="Arkindex Elements Worker", use_cache=False):
+        super().__init__(description, use_cache)
 
         # Add report concerning elements
         self.report = Reporter("unknown worker")
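With use_cache threaded through both constructors, a worker only gets a SQLite cache when it explicitly asks for one. A hedged usage sketch (DemoWorker and its process_element body are hypothetical, and the arkindex_worker.worker import path is assumed):

from arkindex_worker.worker import ElementsWorker

# Hypothetical worker opting into the local SQLite cache; omitting use_cache
# keeps the previous behaviour and never opens the database.
class DemoWorker(ElementsWorker):
    def process_element(self, element):
        pass

worker = DemoWorker(description="Demo worker", use_cache=True)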
@@ -463,24 +466,25 @@ class ElementsWorker(BaseWorker):
         for element in elements:
             self.report.add_element(parent.id, element["type"])
 
-        # Store elements in local cache
-        try:
-            parent_id_hex = convert_str_uuid_to_hex(parent.id)
-            worker_version_id_hex = convert_str_uuid_to_hex(self.worker_version_id)
-            to_insert = [
-                CachedElement(
-                    id=convert_str_uuid_to_hex(created_ids[idx]["id"]),
-                    parent_id=parent_id_hex,
-                    name=element["name"],
-                    type=element["type"],
-                    polygon=json.dumps(element["polygon"]),
-                    worker_version_id=worker_version_id_hex,
-                )
-                for idx, element in enumerate(elements)
-            ]
-            self.cache.insert("elements", to_insert)
-        except sqlite3.IntegrityError as e:
-            logger.warning(f"Couldn't save created elements in local cache: {e}")
+        if self.use_cache:
+            # Store elements in local cache
+            try:
+                parent_id_hex = convert_str_uuid_to_hex(parent.id)
+                worker_version_id_hex = convert_str_uuid_to_hex(self.worker_version_id)
+                to_insert = [
+                    CachedElement(
+                        id=convert_str_uuid_to_hex(created_ids[idx]["id"]),
+                        parent_id=parent_id_hex,
+                        name=element["name"],
+                        type=element["type"],
+                        polygon=json.dumps(element["polygon"]),
+                        worker_version_id=worker_version_id_hex,
+                    )
+                    for idx, element in enumerate(elements)
+                ]
+                self.cache.insert("elements", to_insert)
+            except sqlite3.IntegrityError as e:
+                logger.warning(f"Couldn't save created elements in local cache: {e}")
 
         return created_ids
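Once create_elements has stored its rows, the cache can be inspected with the standard sqlite3 module, independently of the worker. A minimal sketch, assuming a db.sqlite file produced by the code above:

import sqlite3

# Open the cache database and read back the cached elements; rows written by
# create_elements keep initial = 0, the new column's default.
connection = sqlite3.connect("db.sqlite")
connection.row_factory = sqlite3.Row

rows = connection.execute(
    "SELECT id, name, type, worker_version_id, initial FROM elements"
).fetchall()
for row in rows:
    print(dict(row))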
@@ -164,6 +164,16 @@ def mock_elements_worker(monkeypatch, mock_worker_version_api):
     return worker
 
 
+@pytest.fixture
+def mock_elements_worker_with_cache(monkeypatch, mock_worker_version_api):
+    """Build and configure an ElementsWorker using SQLite cache with fixed CLI parameters to avoid issues with pytest"""
+    monkeypatch.setattr(sys, "argv", ["worker"])
+
+    worker = ElementsWorker(use_cache=True)
+    worker.configure()
+
+    return worker
+
+
 @pytest.fixture
 def fake_page_element():
     with open(FIXTURES_DIR / "page_element.json", "r") as f:
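Tests opt into the cache-backed worker simply by naming the new fixture as a parameter. A hypothetical example (test_cache_is_configured is not part of this commit):

# Hypothetical test using the new fixture: pytest injects a fully configured
# ElementsWorker whose SQLite cache has already been created.
def test_cache_is_configured(mock_elements_worker_with_cache):
    assert mock_elements_worker_with_cache.use_cache
    assert mock_elements_worker_with_cache.cache is not None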
@@ -17,7 +17,6 @@ def test_init_default_local_share(monkeypatch):
 
     assert worker.work_dir == os.path.expanduser("~/.local/share/arkindex")
     assert worker.worker_version_id == "12341234-1234-1234-1234-123412341234"
-    assert worker.cache
 
 
 def test_init_default_xdg_data_home(monkeypatch):
@@ -27,6 +26,14 @@ def test_init_default_xdg_data_home(monkeypatch):
 
     assert worker.work_dir == f"{path}/arkindex"
     assert worker.worker_version_id == "12341234-1234-1234-1234-123412341234"
+
+
+def test_init_with_local_cache(monkeypatch):
+    worker = BaseWorker(use_cache=True)
+
+    assert worker.work_dir == os.path.expanduser("~/.local/share/arkindex")
+    assert worker.worker_version_id == "12341234-1234-1234-1234-123412341234"
+    assert worker.use_cache
     assert worker.cache
@@ -37,7 +44,6 @@ def test_init_var_ponos_data_given(monkeypatch):
 
     assert worker.work_dir == f"{path}/current"
     assert worker.worker_version_id == "12341234-1234-1234-1234-123412341234"
-    assert worker.cache
 
 
 def test_init_var_worker_version_id_missing(monkeypatch, mock_user_api):
@@ -48,7 +54,6 @@ def test_init_var_worker_version_id_missing(monkeypatch, mock_user_api):
 
     assert worker.worker_version_id is None
     assert worker.is_read_only is True
     assert worker.config == {}  # default empty case
-    assert worker.cache
 
 
 def test_init_var_worker_local_file(monkeypatch, tmp_path, mock_user_api):
@@ -63,7 +68,6 @@ def test_init_var_worker_local_file(monkeypatch, tmp_path, mock_user_api):
 
     assert worker.worker_version_id is None
     assert worker.is_read_only is True
     assert worker.config == {"localKey": "abcdef123"}  # Use a local file for devs
-    assert worker.cache
 
     config.unlink()
@@ -642,7 +642,7 @@ def test_create_elements_api_error(responses, mock_elements_worker):
     ]
 
 
-def test_create_elements(responses, mock_elements_worker):
+def test_create_elements(responses, mock_elements_worker_with_cache):
     elt = Element({"id": "12341234-1234-1234-1234-123412341234"})
     responses.add(
         responses.POST,
@@ -651,7 +651,7 @@ def test_create_elements(responses, mock_elements_worker):
         json=[{"id": "497f6eca-6276-4993-bfeb-53cbbbba6f08"}],
     )
 
-    created_ids = mock_elements_worker.create_elements(
+    created_ids = mock_elements_worker_with_cache.create_elements(
         parent=elt,
         elements=[
             {
@@ -684,7 +684,7 @@ def test_create_elements(responses, mock_elements_worker):
     cache_path = f"{CACHE_DIR}/db.sqlite"
     assert os.path.isfile(cache_path)
 
-    rows = mock_elements_worker.cache.cursor.execute(
+    rows = mock_elements_worker_with_cache.cache.cursor.execute(
         "SELECT * FROM elements"
     ).fetchall()
     assert [CachedElement(**dict(row)) for row in rows] == [