diff --git a/arkindex/documents/api/elements.py b/arkindex/documents/api/elements.py
index fe38b288b363e1fb2f00debf5209cb9fbef16d2c..cc324ab4394bca199b369ef1a938a3319f575208 100644
--- a/arkindex/documents/api/elements.py
+++ b/arkindex/documents/api/elements.py
@@ -47,6 +47,7 @@ from arkindex.documents.serializers.elements import (
     CorpusSerializer,
     ElementBulkSerializer,
     ElementCreateSerializer,
+    ElementDestinationSerializer,
     ElementListSerializer,
     ElementNeighborsSerializer,
     ElementParentSerializer,
@@ -63,7 +64,7 @@ from arkindex.project.openapi import AutoSchema
 from arkindex.project.pagination import LargePageNumberPagination, PageNumberPagination
 from arkindex.project.permissions import IsVerified, IsVerifiedOrReadOnly
 from arkindex.project.tools import BulkMap
-from arkindex.project.triggers import corpus_delete, element_trash, worker_results_delete
+from arkindex.project.triggers import corpus_delete, element_trash, move_element, worker_results_delete
 from arkindex.users.models import Role
 from arkindex.users.utils import filter_rights
 
@@ -1410,3 +1411,27 @@ class WorkerResultsDestroy(CorpusACLMixin, DestroyAPIView):
         )
 
         return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+@extend_schema_view(
+    post=extend_schema(operation_id='MoveElement', tags=['elements']),
+)
+class ElementMove(CreateAPIView):
+    """
+    Move an element to a new destination folder
+    """
+    serializer_class = ElementDestinationSerializer
+    permission_classes = (IsVerified, )
+
+    def create(self, request, *args, **kwargs):
+        serializer = self.get_serializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+
+        source = serializer.validated_data['source']
+        destination = serializer.validated_data['destination']
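+        # Run the ElementPath-based parent/descendant checks once basic validation has passed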
+        serializer.perform_create_checks(source, destination)
+
+        move_element(source=source, destination=destination, user_id=self.request.user.id)
+
+        return Response(serializer.data, status=status.HTTP_200_OK)
diff --git a/arkindex/documents/serializers/elements.py b/arkindex/documents/serializers/elements.py
index ab8aeff1c4556a984aab00107ece56a7e61c3c49..4e4658fa24f9b659ca83d5a81054e018295b2093 100644
--- a/arkindex/documents/serializers/elements.py
+++ b/arkindex/documents/serializers/elements.py
@@ -696,3 +696,41 @@ class WorkerStatisticsSerializer(serializers.Serializer):
     started = serializers.IntegerField(read_only=True)
     processed = serializers.IntegerField(read_only=True)
     error = serializers.IntegerField(read_only=True)
+
+
+class ElementDestinationSerializer(serializers.Serializer):
+    source = serializers.PrimaryKeyRelatedField(queryset=Element.objects.none())
+    destination = serializers.PrimaryKeyRelatedField(queryset=Element.objects.none())
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        if not self.context.get('request'):
+            # Do not raise an error here, so the OpenAPI schema can be generated without a request
+            return
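+        # Restrict source and destination to elements from corpora the user can write to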
+        corpora = Corpus.objects.writable(self.context['request'].user)
+        self.fields['source'].queryset = Element.objects.filter(corpus__in=corpora).select_related('corpus')
+        self.fields['destination'].queryset = Element.objects.filter(corpus__in=corpora).select_related('corpus')
+
+    def validate(self, data):
+        data = super().validate(data)
+        source = data.get('source')
+        destination = data.get('destination')
+
+        if destination.id == source.id:
+            raise ValidationError({'destination': ['A source element cannot be moved into itself']})
+        if destination.corpus != source.corpus:
+            raise ValidationError({'destination': ['A source element cannot be moved to a destination in another corpus']})
+        return data
+
+    def perform_create_checks(self, source, destination):
+        # Assert the destination is not already a direct parent of the source
+        if ElementPath.objects.filter(element_id=source.id, path__last=destination.id).exists():
+            raise ValidationError({'destination': [
+                "'{}' is already a direct parent of '{}'".format(destination.id, source.id)
+            ]})
+        # Assert the destination is not a descendant of the source
+        if ElementPath.objects.filter(element_id=destination.id, path__contains=[source.id]).exists():
+            raise ValidationError({'destination': [
+                "'{}' is a child of element '{}'".format(destination.id, source.id)
+            ]})
diff --git a/arkindex/documents/tasks.py b/arkindex/documents/tasks.py
index 06f466b6f6b6e5ebd1f10c586ecde601d24af479..eaa30844059de89c0d086033ec01f7ff08fb9cc1 100644
--- a/arkindex/documents/tasks.py
+++ b/arkindex/documents/tasks.py
@@ -169,6 +169,18 @@ def worker_results_delete(corpus_id: str, version_id: str, parent_id: str) -> No
     transcriptions._raw_delete(using='default')
 
 
+@job('high')
+def move_element(source: Element, destination: Element) -> None:
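+    """
+    Detach the source element from all of its current parents,
+    then attach it to the destination; children paths are updated accordingly.
+    """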
+    paths = ElementPath.objects.filter(element_id=source.id)
+    for path in paths:
+        Element.objects.get(id=path.path[-1]).remove_child(source)
+    source.add_parent(destination)
+
+
 @job('default', timeout=3600)
 def initialize_activity(process: DataImport):
     """
diff --git a/arkindex/documents/tests/tasks/test_move_element.py b/arkindex/documents/tests/tasks/test_move_element.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1d9fd442c3162ab318ca805ec7a72640935563b
--- /dev/null
+++ b/arkindex/documents/tests/tasks/test_move_element.py
@@ -0,0 +1,98 @@
+from unittest.mock import patch
+from uuid import UUID
+
+from django.db import connections
+from django.db.backends.base.base import _thread
+
+from arkindex.documents.models import ElementPath
+from arkindex.documents.tasks import move_element
+from arkindex.project.tests import FixtureTestCase
+
+PATHS_IDS = [
+    UUID('00000000-0000-0000-0000-000000000000'),
+    UUID('11111111-1111-1111-1111-111111111111'),
+    UUID('22222222-2222-2222-2222-222222222222'),
+    UUID('33333333-3333-3333-3333-333333333333'),
+    UUID('44444444-4444-4444-4444-444444444444'),
+    UUID('55555555-5555-5555-5555-555555555555'),
+    UUID('66666666-6666-6666-6666-666666666666'),
+    UUID('77777777-7777-7777-7777-777777777777'),
+    UUID('88888888-8888-8888-8888-888888888888'),
+]
+
+
+class TestMoveElement(FixtureTestCase):
+
+    @classmethod
+    def setUpTestData(cls):
+        super().setUpTestData()
+        cls.page_type = cls.corpus.types.get(slug='page')
+        cls.destination = cls.corpus.elements.get(name='Volume 2')
+        cls.parent = cls.corpus.elements.get(name='Volume 1')
+        cls.source_with_children = cls.corpus.elements.get(name='Volume 1, page 1r')
+        cls.source_without_child = cls.corpus.elements.get(name='Volume 1, page 2r')
+        ElementPath.objects.filter(path__contains=[cls.source_without_child.id]).delete()
+
+    @patch.object(ElementPath._meta.get_field('id'), 'get_default')
+    def test_run_on_source_without_child(self, default_field_mock):
+        default_field_mock.return_value = PATHS_IDS[0]
+
+        # No child on this page
+        self.assertEqual(ElementPath.objects.filter(path__contains=[self.source_without_child.id]).count(), 0)
+
+        source_paths = ElementPath.objects.filter(element_id=self.source_without_child.id)
+        self.assertEqual(len(source_paths), 1)
+        self.assertEqual(list(source_paths.values('path')), [{'path': [self.parent.id]}])
+
+        with self.assertExactQueries('element_move_without_child.sql', params={
+            'source_id': str(self.source_without_child.id),
+            'parent_id': str(self.parent.id),
+            'destination_id': str(self.destination.id),
+            'page_type_id': str(self.page_type.id),
+            'path_id': str(PATHS_IDS[0]),
+            'savepoints': [f"s{_thread.get_ident()}_x{connections['default'].savepoint_state + 1}", f"s{_thread.get_ident()}_x{connections['default'].savepoint_state + 2}"]
+        }):
+            move_element(self.source_without_child, self.destination)
+
+        self.assertEqual(len(source_paths), 1)
+        self.assertEqual(list(source_paths.values('path')), [{'path': [self.destination.id]}])
+
+    @patch.object(ElementPath._meta.get_field('id'), 'get_default')
+    def test_run_on_source_with_children(self, default_field_mock):
+        default_field_mock.side_effect = PATHS_IDS
+
+        # 4 children on this page
+        children_paths = ElementPath.objects.filter(path__contains=[self.source_with_children.id])
+        self.assertEqual(children_paths.count(), 4)
+        self.assertEqual(list(children_paths.values('path')), [
+            {'path': [self.parent.id, self.source_with_children.id]},
+            {'path': [self.parent.id, self.source_with_children.id]},
+            {'path': [self.parent.id, self.source_with_children.id]},
+            {'path': [self.parent.id, self.source_with_children.id]}
+        ])
+
+        source_paths = ElementPath.objects.filter(element_id=self.source_with_children.id)
+        self.assertEqual(len(source_paths), 1)
+        self.assertEqual(list(source_paths.values('path')), [{'path': [self.parent.id]}])
+
+        with self.assertExactQueries('element_move_with_children.sql', params={
+            'source_id': str(self.source_with_children.id),
+            'parent_id': str(self.parent.id),
+            'destination_id': str(self.destination.id),
+            'children_ids': [str(id) for id in children_paths.values_list('element_id', flat=True)],
+            'page_type_id': str(self.page_type.id),
+            'paths_ids': [str(id) for id in PATHS_IDS],
+            'savepoints': [f"s{_thread.get_ident()}_x{connections['default'].savepoint_state + 1}", f"s{_thread.get_ident()}_x{connections['default'].savepoint_state + 2}"]
+        }):
+            move_element(self.source_with_children, self.destination)
+
+        self.assertEqual(len(source_paths), 1)
+        self.assertEqual(list(source_paths.values('path')), [{'path': [self.destination.id]}])
+
+        # Assert children were also moved
+        self.assertEqual(list(children_paths.values('path')), [
+            {'path': [self.destination.id, self.source_with_children.id]},
+            {'path': [self.destination.id, self.source_with_children.id]},
+            {'path': [self.destination.id, self.source_with_children.id]},
+            {'path': [self.destination.id, self.source_with_children.id]}
+        ])
diff --git a/arkindex/documents/tests/test_move_element.py b/arkindex/documents/tests/test_move_element.py
new file mode 100644
index 0000000000000000000000000000000000000000..f47a47864f459b6de707c30c21c6b11676ba1c7d
--- /dev/null
+++ b/arkindex/documents/tests/test_move_element.py
@@ -0,0 +1,132 @@
+from unittest.mock import call, patch
+
+from django.urls import reverse
+from rest_framework import status
+
+from arkindex.documents.models import Corpus
+from arkindex.project.tests import FixtureAPITestCase
+from arkindex.users.models import Role
+
+
+class TestMoveElement(FixtureAPITestCase):
+
+    @classmethod
+    def setUpTestData(cls):
+        super().setUpTestData()
+        cls.source = cls.corpus.elements.get(name='Volume 1, page 1r')
+        cls.destination = cls.corpus.elements.get(name='Volume 2')
+
+    def test_move_element_requires_login(self):
+        with self.assertNumQueries(0):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(self.destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
+    def test_move_element_requires_verified(self):
+        self.user.verified_email = False
+        self.user.save()
+        self.client.force_login(self.user)
+        with self.assertNumQueries(2):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(self.destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
+
+    def test_move_element_wrong_acl(self):
+        private_corpus = Corpus.objects.create(name='private', public=False)
+        private_element = private_corpus.elements.create(
+            type=private_corpus.types.create(slug='folder'),
+        )
+
+        self.client.force_login(self.user)
+        with self.assertNumQueries(6):
+            response = self.client.post(reverse('api:move-element'), {'source': str(private_element.id), 'destination': str(private_element.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {
+                'source': [f'Invalid pk "{private_element.id}" - object does not exist.'],
+                'destination': [f'Invalid pk "{private_element.id}" - object does not exist.']
+            }
+        )
+
+    def test_move_element_wrong_source(self):
+        self.client.force_login(self.user)
+        with self.assertNumQueries(6):
+            response = self.client.post(reverse('api:move-element'), {'source': '12341234-1234-1234-1234-123412341234', 'destination': str(self.destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'source': ['Invalid pk "12341234-1234-1234-1234-123412341234" - object does not exist.']}
+        )
+
+    def test_move_element_wrong_destination(self):
+        self.client.force_login(self.user)
+        with self.assertNumQueries(6):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': '12341234-1234-1234-1234-123412341234'}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'destination': ['Invalid pk "12341234-1234-1234-1234-123412341234" - object does not exist.']}
+        )
+
+    def test_move_element_same_source_destination(self):
+        self.client.force_login(self.user)
+        with self.assertNumQueries(6):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(self.source.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'destination': ['A source element cannot be moved into itself']}
+        )
+
+    def test_move_element_different_corpus(self):
+        corpus2 = Corpus.objects.create(name='new')
+        corpus2.memberships.create(user=self.user, level=Role.Contributor.value)
+        destination = corpus2.elements.create(type=corpus2.types.create(slug='folder'))
+
+        self.client.force_login(self.user)
+        with self.assertNumQueries(5):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'destination': ['A source element cannot be moved to a destination in another corpus']}
+        )
+
+    def test_move_element_destination_is_direct_parent(self):
+        destination = self.corpus.elements.get(name='Volume 1')
+
+        self.client.force_login(self.user)
+        with self.assertNumQueries(7):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'destination': [f"'{destination.id}' is already a direct parent of '{self.source.id}'"]}
+        )
+
+    def test_move_element_destination_is_child(self):
+        source = self.corpus.elements.get(name='Volume 1')
+        destination_id = self.source.id
+
+        self.client.force_login(self.user)
+        with self.assertNumQueries(8):
+            response = self.client.post(reverse('api:move-element'), {'source': str(source.id), 'destination': str(destination_id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
+        self.assertDictEqual(
+            response.json(),
+            {'destination': [f"'{destination_id}' is a child of element '{source.id}'"]}
+        )
+
+    @patch('arkindex.project.triggers.tasks.move_element.delay')
+    def test_move_element(self, delay_mock):
+        self.client.force_login(self.user)
+        with self.assertNumQueries(8):
+            response = self.client.post(reverse('api:move-element'), {'source': str(self.source.id), 'destination': str(self.destination.id)}, format='json')
+            self.assertEqual(response.status_code, status.HTTP_200_OK)
+
+        self.assertEqual(delay_mock.call_count, 1)
+        self.assertEqual(delay_mock.call_args, call(
+            source=self.source,
+            destination=self.destination,
+            user_id=self.user.id,
+            description=f"Moving element {self.source.name} to element {self.destination.name}"
+        ))
diff --git a/arkindex/project/api_v1.py b/arkindex/project/api_v1.py
index b9a9059e4ae7b73a0b25c9c711259a9a0b09bdfc..26d6b27adaff08e8748a89dedeb51cd6f75340fc 100644
--- a/arkindex/project/api_v1.py
+++ b/arkindex/project/api_v1.py
@@ -40,6 +40,7 @@ from arkindex.documents.api.elements import (
     ElementBulkCreate,
     ElementChildren,
     ElementMetadata,
+    ElementMove,
     ElementNeighbors,
     ElementParent,
     ElementParents,
@@ -126,6 +127,7 @@ api = [
         name='element-transcriptions-bulk'
     ),
     path('element/<uuid:child>/parent/<uuid:parent>/', ElementParent.as_view(), name='element-parent'),
+    path('element/move/', ElementMove.as_view(), name='move-element'),
 
     # Corpora
     path('corpus/', CorpusList.as_view(), name='corpus'),
diff --git a/arkindex/project/triggers.py b/arkindex/project/triggers.py
index 6169e3380d9ed764180d78c9735c5d226b1bd979..79f0686ba4f48451717a386cb08a8a9762ebb0d0 100644
--- a/arkindex/project/triggers.py
+++ b/arkindex/project/triggers.py
@@ -124,6 +124,19 @@ def worker_results_delete(corpus_id: UUID,
     )
 
 
+def move_element(source: Element, destination: Element, user_id: Optional[int] = None) -> None:
+    """
+    Move a source Element (and all of its children) to a destination Element.
+    Remove all of the source Element's previous parent paths.
+    """
+    tasks.move_element.delay(
+        source=source,
+        destination=destination,
+        user_id=user_id,
+        description=f"Moving element {source.name} to element {destination.name}"
+    )
+
+
 def initialize_activity(process: DataImport):
     """
     Initialize activity on every process elements for worker versions that are part of its workflow
diff --git a/arkindex/sql_validation/element_move_with_children.sql b/arkindex/sql_validation/element_move_with_children.sql
new file mode 100644
index 0000000000000000000000000000000000000000..ce66cc7e13d7962b925063b781c0e767207d63c6
--- /dev/null
+++ b/arkindex/sql_validation/element_move_with_children.sql
@@ -0,0 +1,238 @@
+SELECT "documents_elementpath"."id",
+       "documents_elementpath"."element_id",
+       "documents_elementpath"."path",
+       "documents_elementpath"."ordering"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."element_id" = '{source_id}'::uuid;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{parent_id}'::uuid
+LIMIT 21;
+
+SAVEPOINT "{savepoints[0]}";
+
+SELECT (1) AS "a"
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" = '{source_id}'::uuid
+       AND "documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{parent_id}'::uuid)
+LIMIT 1;
+
+SELECT (1) AS "a"
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" = '{source_id}'::uuid
+       AND NOT ("documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{parent_id}'::uuid))
+LIMIT 1;
+
+SELECT DISTINCT "documents_element"."id",
+                "documents_element"."created",
+                "documents_element"."updated",
+                "documents_element"."corpus_id",
+                "documents_element"."type_id",
+                "documents_element"."name",
+                "documents_element"."zone_id",
+                "documents_element"."worker_version_id",
+                "documents_elementpath"."ordering"
+FROM "documents_element"
+INNER JOIN "documents_elementpath" ON ("documents_element"."id" = "documents_elementpath"."element_id")
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[]
+ORDER BY "documents_elementpath"."ordering" ASC;
+
+SELECT "documents_elementpath"."id",
+       "documents_elementpath"."element_id",
+       "documents_elementpath"."path",
+       "documents_elementpath"."ordering"
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" IN ('{children_ids[2]}'::uuid,
+                                                '{children_ids[3]}'::uuid,
+                                                '{children_ids[0]}'::uuid,
+                                                '{children_ids[1]}'::uuid)
+       AND "documents_elementpath"."path" @ > ARRAY['{parent_id}'::uuid]::uuid[]);
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[0]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[1]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[2]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[3]}'::uuid
+LIMIT 21;
+
+SELECT DISTINCT "documents_element"."id",
+                "documents_element"."created",
+                "documents_element"."updated",
+                "documents_element"."corpus_id",
+                "documents_element"."type_id",
+                "documents_element"."name",
+                "documents_element"."zone_id",
+                "documents_element"."worker_version_id",
+                "documents_elementpath"."ordering"
+FROM "documents_element"
+INNER JOIN "documents_elementpath" ON ("documents_element"."id" = "documents_elementpath"."element_id")
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[]
+ORDER BY "documents_elementpath"."ordering" ASC;
+
+DELETE
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" IN ('{children_ids[2]}'::uuid,
+                                                '{children_ids[3]}'::uuid,
+                                                '{children_ids[0]}'::uuid,
+                                                '{children_ids[1]}'::uuid,
+                                                '{source_id}'::uuid)
+       AND "documents_elementpath"."path" @ > ARRAY['{parent_id}'::uuid]::uuid[]);
+
+INSERT INTO "documents_elementpath" ("id",
+                                     "element_id",
+                                     "path",
+                                     "ordering")
+VALUES ('{paths_ids[0]}'::uuid, '{children_ids[0]}'::uuid, ARRAY['{source_id}'::uuid]::uuid[], 1),
+       ('{paths_ids[1]}'::uuid, '{children_ids[1]}'::uuid, ARRAY['{source_id}'::uuid]::uuid[], 2),
+       ('{paths_ids[2]}'::uuid, '{children_ids[2]}'::uuid, ARRAY['{source_id}'::uuid]::uuid[], 0),
+       ('{paths_ids[3]}'::uuid, '{children_ids[3]}'::uuid, ARRAY['{source_id}'::uuid]::uuid[], 0);
+
+RELEASE SAVEPOINT "{savepoints[0]}";
+
+SAVEPOINT "{savepoints[1]}";
+
+SELECT "documents_elementpath"."element_id",
+       "documents_elementpath"."path"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."element_id" IN ('{source_id}'::uuid,
+                                               '{destination_id}'::uuid);
+
+SELECT "documents_elementtype"."id",
+       "documents_elementtype"."corpus_id",
+       "documents_elementtype"."slug",
+       "documents_elementtype"."display_name",
+       "documents_elementtype"."folder",
+       "documents_elementtype"."indexable"
+FROM "documents_elementtype"
+WHERE "documents_elementtype"."id" = '{page_type_id}'::uuid
+LIMIT 21;
+
+SELECT (MAX("documents_elementpath"."ordering") + 1) AS "max"
+FROM "documents_elementpath"
+INNER JOIN "documents_element" ON ("documents_elementpath"."element_id" = "documents_element"."id")
+WHERE ("documents_element"."type_id" = '{page_type_id}'::uuid
+       AND "documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{destination_id}'::uuid
+       AND "documents_elementpath"."path" && ARRAY['{destination_id}'::uuid]::uuid[]);
+
+SELECT "documents_elementpath"."id",
+       "documents_elementpath"."element_id",
+       "documents_elementpath"."path",
+       "documents_elementpath"."ordering"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[];
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[0]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[1]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[2]}'::uuid
+LIMIT 21;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{children_ids[3]}'::uuid
+LIMIT 21;
+
+DELETE
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."id" IN ('{paths_ids[0]}'::uuid,
+                                       '{paths_ids[1]}'::uuid,
+                                       '{paths_ids[2]}'::uuid,
+                                       '{paths_ids[3]}'::uuid);
+
+INSERT INTO "documents_elementpath" ("id",
+                                     "element_id",
+                                     "path",
+                                     "ordering")
+VALUES ('{paths_ids[4]}'::uuid, '{source_id}'::uuid, ARRAY['{destination_id}'::uuid]::uuid[], 3),
+       ('{paths_ids[5]}'::uuid, '{children_ids[0]}'::uuid, ARRAY['{destination_id}'::uuid,'{source_id}'::uuid]::uuid[], 1),
+       ('{paths_ids[6]}'::uuid, '{children_ids[1]}'::uuid, ARRAY['{destination_id}'::uuid,'{source_id}'::uuid]::uuid[], 2),
+       ('{paths_ids[7]}'::uuid, '{children_ids[2]}'::uuid, ARRAY['{destination_id}'::uuid,'{source_id}'::uuid]::uuid[], 0),
+       ('{paths_ids[8]}'::uuid, '{children_ids[3]}'::uuid, ARRAY['{destination_id}'::uuid,'{source_id}'::uuid]::uuid[], 0);
+
+RELEASE SAVEPOINT "{savepoints[1]}"
\ No newline at end of file
diff --git a/arkindex/sql_validation/element_move_without_child.sql b/arkindex/sql_validation/element_move_without_child.sql
new file mode 100644
index 0000000000000000000000000000000000000000..4f40c8fef6e68421498b3452818b6896e2672600
--- /dev/null
+++ b/arkindex/sql_validation/element_move_without_child.sql
@@ -0,0 +1,107 @@
+SELECT "documents_elementpath"."id",
+       "documents_elementpath"."element_id",
+       "documents_elementpath"."path",
+       "documents_elementpath"."ordering"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."element_id" = '{source_id}'::uuid;
+
+SELECT "documents_element"."id",
+       "documents_element"."created",
+       "documents_element"."updated",
+       "documents_element"."corpus_id",
+       "documents_element"."type_id",
+       "documents_element"."name",
+       "documents_element"."zone_id",
+       "documents_element"."worker_version_id"
+FROM "documents_element"
+WHERE "documents_element"."id" = '{parent_id}'::uuid
+LIMIT 21;
+
+SAVEPOINT "{savepoints[0]}";
+
+SELECT (1) AS "a"
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" = '{source_id}'::uuid
+       AND "documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{parent_id}'::uuid)
+LIMIT 1;
+
+SELECT (1) AS "a"
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" = '{source_id}'::uuid
+       AND NOT ("documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{parent_id}'::uuid))
+LIMIT 1;
+
+SELECT DISTINCT "documents_element"."id",
+                "documents_element"."created",
+                "documents_element"."updated",
+                "documents_element"."corpus_id",
+                "documents_element"."type_id",
+                "documents_element"."name",
+                "documents_element"."zone_id",
+                "documents_element"."worker_version_id",
+                "documents_elementpath"."ordering"
+FROM "documents_element"
+INNER JOIN "documents_elementpath" ON ("documents_element"."id" = "documents_elementpath"."element_id")
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[]
+ORDER BY "documents_elementpath"."ordering" ASC;
+
+SELECT DISTINCT "documents_element"."id",
+                "documents_element"."created",
+                "documents_element"."updated",
+                "documents_element"."corpus_id",
+                "documents_element"."type_id",
+                "documents_element"."name",
+                "documents_element"."zone_id",
+                "documents_element"."worker_version_id",
+                "documents_elementpath"."ordering"
+FROM "documents_element"
+INNER JOIN "documents_elementpath" ON ("documents_element"."id" = "documents_elementpath"."element_id")
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[]
+ORDER BY "documents_elementpath"."ordering" ASC;
+
+DELETE
+FROM "documents_elementpath"
+WHERE ("documents_elementpath"."element_id" IN ('{source_id}'::uuid)
+       AND "documents_elementpath"."path" @ > ARRAY['{parent_id}'::uuid]::uuid[]);
+
+RELEASE SAVEPOINT "{savepoints[0]}";
+
+SAVEPOINT "{savepoints[1]}";
+
+SELECT "documents_elementpath"."element_id",
+       "documents_elementpath"."path"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."element_id" IN ('{source_id}'::uuid,
+                                               '{destination_id}'::uuid);
+
+SELECT "documents_elementtype"."id",
+       "documents_elementtype"."corpus_id",
+       "documents_elementtype"."slug",
+       "documents_elementtype"."display_name",
+       "documents_elementtype"."folder",
+       "documents_elementtype"."indexable"
+FROM "documents_elementtype"
+WHERE "documents_elementtype"."id" = '{page_type_id}'::uuid
+LIMIT 21;
+
+SELECT (MAX("documents_elementpath"."ordering") + 1) AS "max"
+FROM "documents_elementpath"
+INNER JOIN "documents_element" ON ("documents_elementpath"."element_id" = "documents_element"."id")
+WHERE ("documents_element"."type_id" = '{page_type_id}'::uuid
+       AND "documents_elementpath"."path"[array_length("documents_elementpath"."path", 1)] = '{destination_id}'::uuid
+       AND "documents_elementpath"."path" && ARRAY['{destination_id}'::uuid]::uuid[]);
+
+SELECT "documents_elementpath"."id",
+       "documents_elementpath"."element_id",
+       "documents_elementpath"."path",
+       "documents_elementpath"."ordering"
+FROM "documents_elementpath"
+WHERE "documents_elementpath"."path" @ > ARRAY['{source_id}'::uuid]::uuid[];
+
+INSERT INTO "documents_elementpath" ("id",
+                                     "element_id",
+                                     "path",
+                                     "ordering")
+VALUES ('{path_id}'::uuid, '{source_id}'::uuid, ARRAY['{destination_id}'::uuid]::uuid[], 3);
+
+RELEASE SAVEPOINT "{savepoints[1]}"
\ No newline at end of file