diff --git a/arkindex_worker/cache.py b/arkindex_worker/cache.py
index 1322e2bc5e56c26ca88a78fd12d718e32da21e65..095f703fcd52e74b8acf2e95fe5dafcd916c06ef 100644
--- a/arkindex_worker/cache.py
+++ b/arkindex_worker/cache.py
@@ -33,6 +33,32 @@ class JSONField(Field):
         return json.loads(value)
 
 
+def merge_parents_caches(self, parents_cache_paths):
+    """Merge every parent task's cache into the current task's cache.
+
+    Intended as a method of the LocalDB cache wrapper: ``self`` exposes the
+    ``cursor`` and ``db`` of the current task's SQLite cache.
+    """
+    for idx, parent_cache in enumerate(parents_cache_paths):
+        statements = [
+            # page_size must be a power of two no larger than 65536;
+            # SQLite silently ignores other values (such as 80000)
+            "PRAGMA page_size=65536;",
+            "PRAGMA synchronous=OFF;",
+            f"ATTACH DATABASE '{parent_cache}' AS source{idx};",
+            # REPLACE INTO upserts on the primary key, so rows shared by
+            # several parents are only stored once
+            f"REPLACE INTO elements SELECT * FROM source{idx}.elements;",
+            f"REPLACE INTO transcriptions SELECT * FROM source{idx}.transcriptions;",
+        ]
+
+        for statement in statements:
+            self.cursor.execute(statement)
+        self.db.commit()
+        # Detach once committed, to stay under SQLite's attached-database limit
+        self.cursor.execute(f"DETACH DATABASE source{idx};")
+
+
 class CachedElement(Model):
     id = UUIDField(primary_key=True)
     parent_id = UUIDField(null=True)
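
For context on the hunk above: attaching a second database file makes its tables queryable under a schema prefix, so a parent cache can be bulk-copied into the task cache with a single SQL statement per table. A minimal standalone sketch of the pattern (the `merge` helper and the single `elements` table are illustrative, not part of this patch):

```python
import sqlite3

def merge(target_path, source_paths):
    """Upsert every source database's rows into the target database."""
    db = sqlite3.connect(target_path)
    cursor = db.cursor()
    for idx, source in enumerate(source_paths):
        # Expose the parent database under a unique schema name
        cursor.execute(f"ATTACH DATABASE '{source}' AS source{idx}")
        # Primary-key conflicts resolve in favour of the latest source
        cursor.execute(f"REPLACE INTO elements SELECT * FROM source{idx}.elements")
        db.commit()
        # Detach so the loop never hits SQLite's attached-database limit
        cursor.execute(f"DETACH DATABASE source{idx}")
    db.close()
```
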
diff --git a/arkindex_worker/worker.py b/arkindex_worker/worker.py
index 1a4282c9f6265c297fd3224804e4163a7b6d7fc7..81c43f67a4d0733b6ccd311fe93cec6d48376e63 100644
--- a/arkindex_worker/worker.py
+++ b/arkindex_worker/worker.py
@@ -33,6 +33,7 @@ from arkindex_worker.models import Element
 from arkindex_worker.reporting import Reporter
 
 MANUAL_SLUG = "manual"
+DATA_DIR = "/data"
 CACHE_DIR = f"/data/{os.environ.get('TASK_ID')}"
 
 
@@ -151,6 +152,30 @@ class BaseWorker(object):
         # Load all required secrets
         self.secrets = {name: self.load_secret(name) for name in required_secrets}
 
+        # Merge the parents' caches (if any) into the current task's local cache
+        task_id = os.environ.get("TASK_ID")
+        if self.cache and task_id:
+            task = self.api_client.request("RetrieveTaskFromAgent", id=task_id)
+
+            parents_cache_paths = []
+            for parent in task["parents"]:
+                parent_cache_path = f"{DATA_DIR}/{parent}/db.sqlite"
+                if os.path.isfile(parent_cache_path):
+                    parents_cache_paths.append(parent_cache_path)
+
+            # A single parent cache can simply be copied over the current
+            # task's local cache
+            if len(parents_cache_paths) == 1:
+                with open(self.cache.path, "rb+") as cache_file, open(
+                    parents_cache_paths[0], "rb"
+                ) as parent_cache_file:
+                    cache_file.truncate(0)
+                    cache_file.write(parent_cache_file.read())
+            # Multiple parent caches have to be merged into the current
+            # task's local cache
+            elif len(parents_cache_paths) > 1:
+                self.cache.merge_parents_caches(parents_cache_paths)
+
     def load_secret(self, name):
         """Load all secrets described in the worker configuration"""
         secret = None
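
The single-parent branch above skips SQL entirely and overwrites the cache file byte for byte, which works because both files are complete SQLite databases sharing the same schema. A sketch of the same fast path using `shutil` (hypothetical helper; it assumes no other connection holds the target file open while it is replaced):

```python
import shutil

def adopt_parent_cache(cache_path, parent_cache_path):
    # A byte-for-byte copy of a complete SQLite file is itself a valid
    # database, so a single parent needs no row-by-row merging.
    shutil.copyfile(parent_cache_path, cache_path)
```
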
diff --git a/tests/conftest.py b/tests/conftest.py
index ca6cd6f3e9a71c0e64505e9a90f5edf0277f29a4..64aba5d69404e7fe5ce1a4371c93848b5d9bac43 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -13,7 +13,7 @@ import yaml
 from arkindex.mock import MockApiClient
-from arkindex_worker.cache import CachedElement, CachedTranscription
+from arkindex_worker.cache import CachedElement, CachedTranscription, LocalDB
 from arkindex_worker.git import GitHelper, GitlabHelper
-from arkindex_worker.worker import ElementsWorker
+from arkindex_worker.worker import BaseWorker, ElementsWorker
 
 FIXTURES_DIR = Path(__file__).resolve().parent / "data"
 
@@ -98,6 +98,42 @@ def temp_working_directory(monkeypatch, tmp_path):
     monkeypatch.setattr(os, "getcwd", _getcwd)
 
 
+@pytest.fixture
+def first_parent_folder():
+    cache_dir = f"{CACHE_DIR}/first_parent_id"
+    os.mkdir(cache_dir)
+    yield
+    if os.path.isdir(cache_dir):
+        os.rmdir(cache_dir)
+
+
+@pytest.fixture
+def second_parent_folder():
+    cache_dir = f"{CACHE_DIR}/second_parent_id"
+    os.mkdir(cache_dir)
+    yield
+    if os.path.isdir(cache_dir):
+        os.rmdir(cache_dir)
+
+
+@pytest.fixture
+def first_parent_cache(first_parent_folder):
+    parent_cache = LocalDB(f"{CACHE_DIR}/first_parent_id/db.sqlite")
+    parent_cache.create_tables()
+    yield
+    if os.path.isfile(parent_cache.path):
+        os.remove(parent_cache.path)
+
+
+@pytest.fixture
+def second_parent_cache(second_parent_folder):
+    parent_cache = LocalDB(f"{CACHE_DIR}/second_parent_id/db.sqlite")
+    parent_cache.create_tables()
+    yield
+    if os.path.isfile(parent_cache.path):
+        os.remove(parent_cache.path)
+
+
 @pytest.fixture(autouse=True)
 def give_worker_version_id_env_variable(monkeypatch):
     monkeypatch.setenv("WORKER_VERSION_ID", "12341234-1234-1234-1234-123412341234")
@@ -170,6 +206,17 @@ def mock_elements_worker(monkeypatch, mock_worker_version_api):
     return worker
 
 
+@pytest.fixture
+def mock_base_worker_with_cache(mocker, monkeypatch, mock_worker_version_api):
+    """Build a BaseWorker using SQLite cache"""
+    monkeypatch.setattr(sys, "argv", ["worker"])
+
+    worker = BaseWorker(use_cache=True)
+    monkeypatch.setenv("TASK_ID", "my_task")
+    mocker.patch("arkindex_worker.worker.DATA_DIR", CACHE_DIR)
+    return worker
+
+
 @pytest.fixture
 def mock_elements_worker_with_cache(monkeypatch, mock_worker_version_api):
     """Build and configure an ElementsWorker using SQLite cache with fixed CLI parameters to avoid issues with pytest"""
diff --git a/tests/test_base_worker.py b/tests/test_base_worker.py
index 40279ea0edab216f73cabd3245407d5fa6049995..1fe549b64a39ada158a2d9bb92bdc966e3c98042 100644
--- a/tests/test_base_worker.py
+++ b/tests/test_base_worker.py
@@ -1,4 +1,5 @@
 # -*- coding: utf-8 -*-
+import json
 import logging
 import os
 import sys
@@ -9,8 +10,42 @@ import pytest
 
 from arkindex.mock import MockApiClient
 from arkindex_worker import logger
+from arkindex_worker.cache import CachedElement, CachedTranscription, LocalDB
+from arkindex_worker.utils import convert_str_uuid_to_hex
 from arkindex_worker.worker import BaseWorker
 
+CACHE_DIR = str(Path(__file__).resolve().parent / "data/cache")
+FIRST_PARENT_CACHE = f"{CACHE_DIR}/first_parent_id/db.sqlite"
+SECOND_PARENT_CACHE = f"{CACHE_DIR}/second_parent_id/db.sqlite"
+FIRST_ELEM_TO_INSERT = CachedElement(
+    id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
+    parent_id=convert_str_uuid_to_hex("12341234-1234-1234-1234-123412341234"),
+    type="something",
+    polygon=json.dumps([[1, 1], [2, 2], [2, 1], [1, 2]]),
+    worker_version_id=convert_str_uuid_to_hex("56785678-5678-5678-5678-567856785678"),
+)
+SECOND_ELEM_TO_INSERT = CachedElement(
+    id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
+    parent_id=convert_str_uuid_to_hex("12341234-1234-1234-1234-123412341234"),
+    type="something",
+    polygon=json.dumps([[1, 1], [2, 2], [2, 1], [1, 2]]),
+    worker_version_id=convert_str_uuid_to_hex("56785678-5678-5678-5678-567856785678"),
+)
+FIRST_TR_TO_INSERT = CachedTranscription(
+    id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
+    element_id=convert_str_uuid_to_hex("11111111-1111-1111-1111-111111111111"),
+    text="Hello!",
+    confidence=0.42,
+    worker_version_id=convert_str_uuid_to_hex("56785678-5678-5678-5678-567856785678"),
+)
+SECOND_TR_TO_INSERT = CachedTranscription(
+    id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
+    element_id=convert_str_uuid_to_hex("22222222-2222-2222-2222-222222222222"),
+    text="How are you?",
+    confidence=0.42,
+    worker_version_id=convert_str_uuid_to_hex("56785678-5678-5678-5678-567856785678"),
+)
+
 
 def test_init_default_local_share(monkeypatch):
     worker = BaseWorker()
@@ -115,6 +150,265 @@ def test_cli_arg_verbose_given(mocker, mock_worker_version_api, mock_user_api):
     logger.setLevel(logging.NOTSET)
 
 
+def test_configure_cache_merging_no_parent(responses, mock_base_worker_with_cache):
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": []},
+    )
+
+    cache_path = mock_base_worker_with_cache.cache.path
+    with open(cache_path, "rb") as before_file:
+        before = before_file.read()
+
+    mock_base_worker_with_cache.configure()
+
+    with open(cache_path, "rb") as after_file:
+        after = after_file.read()
+
+    assert before == after, "Cache was modified"
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
+def test_configure_cache_merging_one_parent_without_file(
+    responses, mock_base_worker_with_cache, first_parent_folder
+):
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": ["first_parent_id"]},
+    )
+
+    cache_path = mock_base_worker_with_cache.cache.path
+    with open(cache_path, "rb") as before_file:
+        before = before_file.read()
+
+    mock_base_worker_with_cache.configure()
+
+    with open(cache_path, "rb") as after_file:
+        after = after_file.read()
+
+    assert before == after, "Cache was modified"
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
+def test_configure_cache_merging_one_parent(
+    responses, mock_base_worker_with_cache, first_parent_cache
+):
+    parent_cache = LocalDB(FIRST_PARENT_CACHE)
+    parent_cache.insert("elements", [FIRST_ELEM_TO_INSERT])
+    parent_cache.insert("transcriptions", [FIRST_TR_TO_INSERT])
+
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": ["first_parent_id"]},
+    )
+
+    mock_base_worker_with_cache.configure()
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM elements"
+    ).fetchall()
+    assert (
+        stored_rows == parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+    )
+    assert [CachedElement(**dict(row)) for row in stored_rows] == [FIRST_ELEM_TO_INSERT]
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM transcriptions"
+    ).fetchall()
+    assert (
+        stored_rows
+        == parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+    assert [CachedTranscription(**dict(row)) for row in stored_rows] == [
+        FIRST_TR_TO_INSERT
+    ]
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
+def test_configure_cache_merging_multiple_parents_one_file(
+    responses, mock_base_worker_with_cache, first_parent_cache, second_parent_folder
+):
+    parent_cache = LocalDB(FIRST_PARENT_CACHE)
+    parent_cache.insert("elements", [FIRST_ELEM_TO_INSERT])
+    parent_cache.insert("transcriptions", [FIRST_TR_TO_INSERT])
+
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": ["first_parent_id", "second_parent_id"]},
+    )
+
+    mock_base_worker_with_cache.configure()
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM elements"
+    ).fetchall()
+    assert (
+        stored_rows == parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+    )
+    assert [CachedElement(**dict(row)) for row in stored_rows] == [FIRST_ELEM_TO_INSERT]
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM transcriptions"
+    ).fetchall()
+    assert (
+        stored_rows
+        == parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+    assert [CachedTranscription(**dict(row)) for row in stored_rows] == [
+        FIRST_TR_TO_INSERT
+    ]
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
+def test_configure_cache_merging_multiple_parents_differing_lines(
+    responses, mock_base_worker_with_cache, first_parent_cache, second_parent_cache
+):
+    # Insert differing rows in both parent caches
+    parent_cache = LocalDB(FIRST_PARENT_CACHE)
+    parent_cache.insert("elements", [FIRST_ELEM_TO_INSERT])
+    parent_cache.insert("transcriptions", [FIRST_TR_TO_INSERT])
+    second_parent_cache = LocalDB(SECOND_PARENT_CACHE)
+    second_parent_cache.insert("elements", [SECOND_ELEM_TO_INSERT])
+    second_parent_cache.insert("transcriptions", [SECOND_TR_TO_INSERT])
+
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": ["first_parent_id", "second_parent_id"]},
+    )
+
+    mock_base_worker_with_cache.configure()
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM elements"
+    ).fetchall()
+    assert (
+        stored_rows
+        == parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+        + second_parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+    )
+    assert [CachedElement(**dict(row)) for row in stored_rows] == [
+        FIRST_ELEM_TO_INSERT,
+        SECOND_ELEM_TO_INSERT,
+    ]
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM transcriptions"
+    ).fetchall()
+    assert (
+        stored_rows
+        == parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+        + second_parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+    assert [CachedTranscription(**dict(row)) for row in stored_rows] == [
+        FIRST_TR_TO_INSERT,
+        SECOND_TR_TO_INSERT,
+    ]
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
+def test_configure_cache_merging_multiple_parents_identical_lines(
+    responses, mock_base_worker_with_cache, first_parent_cache, second_parent_cache
+):
+    # Insert identical rows in both parent caches
+    parent_cache = LocalDB(FIRST_PARENT_CACHE)
+    parent_cache.insert("elements", [FIRST_ELEM_TO_INSERT, SECOND_ELEM_TO_INSERT])
+    parent_cache.insert("transcriptions", [FIRST_TR_TO_INSERT, SECOND_TR_TO_INSERT])
+    second_parent_cache = LocalDB(SECOND_PARENT_CACHE)
+    second_parent_cache.insert(
+        "elements", [FIRST_ELEM_TO_INSERT, SECOND_ELEM_TO_INSERT]
+    )
+    second_parent_cache.insert(
+        "transcriptions", [FIRST_TR_TO_INSERT, SECOND_TR_TO_INSERT]
+    )
+
+    responses.add(
+        responses.GET,
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+        status=200,
+        json={"parents": ["first_parent_id", "second_parent_id"]},
+    )
+
+    mock_base_worker_with_cache.configure()
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM elements"
+    ).fetchall()
+    assert (
+        stored_rows == parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+    )
+    assert (
+        stored_rows
+        == second_parent_cache.cursor.execute("SELECT * FROM elements").fetchall()
+    )
+    assert [CachedElement(**dict(row)) for row in stored_rows] == [
+        FIRST_ELEM_TO_INSERT,
+        SECOND_ELEM_TO_INSERT,
+    ]
+
+    stored_rows = mock_base_worker_with_cache.cache.cursor.execute(
+        "SELECT * FROM transcriptions"
+    ).fetchall()
+    assert (
+        stored_rows
+        == parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+    assert (
+        stored_rows
+        == second_parent_cache.cursor.execute("SELECT * FROM transcriptions").fetchall()
+    )
+    assert [CachedTranscription(**dict(row)) for row in stored_rows] == [
+        FIRST_TR_TO_INSERT,
+        SECOND_TR_TO_INSERT,
+    ]
+
+    assert len(responses.calls) == 3
+    assert [call.request.url for call in responses.calls] == [
+        "http://testserver/api/v1/user/",
+        "http://testserver/api/v1/workers/versions/12341234-1234-1234-1234-123412341234/",
+        "http://testserver/ponos/v1/task/my_task/from-agent/",
+    ]
+
+
 def test_load_missing_secret():
     worker = BaseWorker()
     worker.api_client = MockApiClient()
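
A closing note on why `test_configure_cache_merging_multiple_parents_identical_lines` expects a single copy of each row even though both parents contain them: `REPLACE INTO` resolves primary-key conflicts by overwriting. A self-contained illustration:

```python
import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE elements (id TEXT PRIMARY KEY, type TEXT)")
# Replacing the same primary key twice leaves exactly one row behind
db.execute("REPLACE INTO elements VALUES ('1111', 'something')")
db.execute("REPLACE INTO elements VALUES ('1111', 'something')")
assert db.execute("SELECT COUNT(*) FROM elements").fetchone()[0] == 1
```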