Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • arkindex/backend
1 result
Show changes
Commits on Source (11)
......@@ -34,4 +34,14 @@ RUN chown -R ark:teklia /logs
# Run with Daphne
EXPOSE 80
CMD ["daphne", "--verbosity=1", "--bind=0.0.0.0", "--port=80", "arkindex.project.asgi:application"]
# Serve the ASGI application with Daphne; explicit timeouts keep stale HTTP and
# websocket connections from accumulating. NOTE(review): the mixed dash/underscore
# flag spelling matches Daphne's actual CLI options — do not "normalize" them.
CMD [ \
"daphne", \
"--http-timeout=30", \
"--websocket_connect_timeout=10", \
"--websocket_timeout=120", \
"--ping-timeout=120", \
"--application-close-timeout=3", \
"--verbosity=1", \
"--bind=0.0.0.0", \
"--port=80", \
"arkindex.project.asgi:application"]
......@@ -39,6 +39,9 @@ class DataImport(IndexableModel):
return State.Unscheduled
# This allows annotating a DataImport queryset with "last_run" and preventing duplicate SQL queries
if hasattr(self, 'last_run'):
# last_run may be None when there is a workflow without any tasks
if self.last_run is None:
return State.Unscheduled
return self.workflow.get_state(self.last_run)
else:
return self.workflow.state
......
......@@ -93,6 +93,21 @@ class TestImports(FixtureAPITestCase):
self.assertEqual(len(data['results']), 1)
self.assertEqual(data['results'][0]['id'], str(dataimport2.id))
def test_list_no_tasks(self):
    """
    Ensure the DataImport reports an Unscheduled state when there are no tasks in its workflow
    """
    # No workflow exists until the import is started
    self.assertIsNone(self.dataimport.workflow)
    self.dataimport.start()
    # Drop every task to simulate an empty workflow
    self.dataimport.workflow.tasks.all().delete()

    self.client.force_login(self.user)
    response = self.client.get(reverse('api:import-list'))
    self.assertEqual(response.status_code, status.HTTP_200_OK)

    results = response.json()['results']
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]['state'], State.Unscheduled.value)
def test_details_requires_login(self):
response = self.client.get(reverse('api:import-details', kwargs={'pk': self.dataimport.id}))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
......@@ -133,6 +148,19 @@ class TestImports(FixtureAPITestCase):
data = response.json()
self.assertEqual(data['id'], str(self.dataimport.id))
def test_details_no_tasks(self):
    """
    Ensure the DataImport reports an Unscheduled state when there are no tasks in its workflow
    """
    # The workflow only gets created once the import starts
    self.assertIsNone(self.dataimport.workflow)
    self.dataimport.start()
    # Remove all tasks to simulate an empty workflow
    self.dataimport.workflow.tasks.all().delete()

    self.client.force_login(self.user)
    url = reverse('api:import-details', kwargs={'pk': self.dataimport.id})
    response = self.client.get(url)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(response.json()['state'], State.Unscheduled.value)
def test_retry_requires_login(self):
response = self.client.post(reverse('api:import-retry', kwargs={'pk': self.dataimport.id}))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
......
......@@ -10,7 +10,7 @@ from rest_framework import status, response
from rest_framework.response import Response
from arkindex_common.enums import TranscriptionType
from arkindex.documents.models import (
Corpus, Element, ElementPath, Right,
Corpus, Element, ElementPath, Right, MetaData,
Classification, ClassificationState, Transcription, Region
)
from arkindex.documents.serializers.elements import (
......@@ -789,3 +789,24 @@ class ElementMetadata(CreateAPIView):
if self.request: # Ignore this step when generating the schema with OpenAPI
context['element'] = self.get_object()
return context
class MetadataDestroy(DestroyAPIView):
    """
    Delete an existing element metadata
    """
    permission_classes = (IsVerified, )
    serializer_class = MetaDataLightSerializer
    openapi_overrides = {
        'operationId': 'DestroyMetaData',
        'tags': ['elements']
    }

    def get_queryset(self):
        # Restrict to metadata on elements of corpora the user can read,
        # so that unreadable metadata results in a 404 at object lookup
        readable_corpora = Corpus.objects.readable(self.request.user)
        return MetaData.objects.filter(element__corpus__in=readable_corpora)

    def check_object_permissions(self, request, obj):
        super().check_object_permissions(request, obj)
        # Reading is not enough: deletion requires write access on the corpus
        rights = obj.element.corpus.get_acl_rights(request.user)
        if Right.Write not in rights:
            self.permission_denied(request, message='You do not have write access to this corpus.')
......@@ -5,6 +5,7 @@ from django.db.models.deletion import Collector
from django.db.models.signals import pre_delete
from arkindex_common.enums import MetaType
from arkindex.project.tools import disconnect_signal
from arkindex.project.triggers import reindex_start
from arkindex.dataimport.models import Event, Revision
from arkindex.documents.indexer import Indexer
from arkindex.documents.models import (
......@@ -141,6 +142,15 @@ class MLResultsConsumer(SyncConsumer):
transcriptions__isnull=True,
), batch_size)
# Trigger a reindexation to clear up deleted results from search indexes
reindex_start(
element=element_id,
corpus=corpus_id,
transcriptions=True,
elements=True,
entities=True,
)
class CorpusConsumer(SyncConsumer):
......
......@@ -240,10 +240,11 @@ class ElementCreateSerializer(ElementLightSerializer):
required=False
)
parent = serializers.PrimaryKeyRelatedField(queryset=Element.objects.none(), required=False)
polygon = PolygonField(required=False)
class Meta(ElementLightSerializer.Meta):
model = Element
fields = ElementLightSerializer.Meta.fields + ('image', 'corpus', 'parent')
fields = ElementLightSerializer.Meta.fields + ('image', 'corpus', 'parent', 'polygon')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
......@@ -306,14 +307,16 @@ class ElementCreateSerializer(ElementLightSerializer):
image = validated_data.pop('image', None)
parent = validated_data.pop('parent', None)
metadata = validated_data.pop('metadata', {})
polygon = validated_data.pop('polygon', None)
element = Element(**validated_data)
element.full_clean()
if image:
# Create a new zone containing the image
polygon = polygon if polygon else Polygon.from_coords(0, 0, image.width, image.height)
zone, _ = image.zones.get_or_create(
polygon=Polygon.from_coords(0, 0, image.width, image.height),
polygon=polygon,
)
# update Element zone
element.zone = zone
......
from unittest.mock import patch
from asyncmock import AsyncMock
from django.db.models import Q
from arkindex_common.enums import TranscriptionType, MetaType, EntityType
from arkindex.project.polygon import Polygon
......@@ -91,7 +93,9 @@ class TestMLResultsConsumer(FixtureTestCase):
with self.assertRaises(AssertionError):
MLResultsConsumer({}).ml_results_delete({})
def test_delete_corpus(self):
@patch('arkindex.project.triggers.get_channel_layer')
def test_delete_corpus(self, get_layer_mock):
get_layer_mock.return_value.send = AsyncMock()
querysets = self._get_querysets(self.corpus.elements.all())
for queryset in querysets:
......@@ -115,7 +119,19 @@ class TestMLResultsConsumer(FixtureTestCase):
self.assertEqual(metadata.type, MetaType.Text)
self.assertIsNone(metadata.entity)
def test_delete_element(self):
get_layer_mock().send.assert_called_once_with('reindex', {
'type': 'reindex.start',
'element': None,
'corpus': str(self.corpus.id),
'transcriptions': True,
'elements': True,
'entities': True,
'drop': False,
})
@patch('arkindex.project.triggers.get_channel_layer')
def test_delete_element(self, get_layer_mock):
get_layer_mock.return_value.send = AsyncMock()
folder1_querysets = self._get_querysets(
Element.objects.filter(Q(id=self.folder1.id) | Q(paths__path__last=self.folder1.id))
)
......@@ -144,3 +160,13 @@ class TestMLResultsConsumer(FixtureTestCase):
metadata = self.page2.metadatas.get(value='Some entity 2')
self.assertEqual(metadata.type, MetaType.Entity)
self.assertIsNotNone(metadata.entity)
get_layer_mock().send.assert_called_once_with('reindex', {
'type': 'reindex.start',
'element': str(self.folder1.id),
'corpus': str(self.corpus.id),
'transcriptions': True,
'elements': True,
'entities': True,
'drop': False,
})
......@@ -293,6 +293,33 @@ class TestElementsAPI(FixtureAPITestCase):
self.assertEqual(act.name, 'Castle story')
self.assertEqual(act.type, self.act_type)
def test_create_element_polygon(self):
    """
    Create an element with an explicit polygon on an existing volume
    """
    self.client.force_login(self.user)
    coords = [[10, 10], [10, 40], [40, 40], [40, 10], [10, 10]]
    request = self.make_create_request(
        parent=str(self.vol.id),
        elt_type='page',
        name='The castle of my dreams again',
        image=str(self.image.id),
        polygon=coords,
    )
    response = self.client.post(**request)
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)

    data = response.json()
    page = self.corpus.elements.get(id=data['id'])
    expected = {
        'id': str(page.id),
        'corpus': str(page.corpus.id),
        'name': page.name,
        'type': page.type.slug,
    }
    self.assertDictEqual(data, expected)
    self.assertEqual(page.name, 'The castle of my dreams again')
    self.assertEqual(page.type, self.page_type)
def test_create_parent_different_corpus(self):
self.client.force_login(self.user)
new_corpus = Corpus.objects.create(name='new')
......
from django.urls import reverse
from rest_framework import status
from arkindex.documents.models import Corpus
from arkindex.project.tests import FixtureAPITestCase
from arkindex_common.enums import MetaType
class TestElementsAPI(FixtureAPITestCase):
    """
    Tests for the metadata destruction endpoint
    """

    @classmethod
    def setUpTestData(cls):
        super().setUpTestData()
        cls.vol = cls.corpus.elements.get(name='Volume 1')
        # A corpus the test user has no rights on by default
        cls.private_corpus = Corpus.objects.create(name='private')
        cls.private_vol = cls.private_corpus.elements.create(type=cls.vol.type, name='Vol')

    def setUp(self):
        super().setUp()
        # Recreated for every test, since some tests delete them
        self.metadata = self.vol.metadatas.create(type=MetaType.Date, name='leet', value='1337')
        self.private_metadata = self.private_vol.metadatas.create(type=MetaType.Text, name='password', value='123')

    def _destroy_url(self, metadata):
        # URL for the metadata destruction endpoint of a given metadata
        return reverse('api:metadata-destroy', kwargs={'pk': str(metadata.id)})

    def test_delete_metadata_verified(self):
        # Anonymous users may not delete metadata
        response = self.client.delete(self._destroy_url(self.metadata))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_delete_metadata_methods(self):
        # Only the DELETE verb is allowed on this endpoint
        self.client.force_login(self.user)
        url = self._destroy_url(self.metadata)
        for method in (self.client.post, self.client.get, self.client.patch, self.client.put):
            response = method(url)
            self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_delete_metadata(self):
        self.client.force_login(self.user)
        self.assertEqual(self.vol.metadatas.count(), 1)
        response = self.client.delete(self._destroy_url(self.metadata))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(self.vol.metadatas.count(), 0)

    def test_delete_metadata_private_corpus(self):
        # Metadata on an unreadable corpus is reported as missing, not forbidden
        self.client.force_login(self.user)
        response = self.client.delete(self._destroy_url(self.private_metadata))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_delete_metadata_readable_element(self):
        """
        An explicit message should be raised when user can read but not delete metadata
        """
        self.client.force_login(self.user)
        # Grant read-only access to the private corpus
        self.private_corpus.corpus_right.create(user_id=self.user.id, can_write=False)
        response = self.client.delete(self._destroy_url(self.private_metadata))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertDictEqual(response.json(), {'detail': 'You do not have write access to this corpus.'})
......@@ -2,9 +2,9 @@ from django.urls import path
from django.views.generic.base import RedirectView
from arkindex.documents.api.elements import (
ElementsList, RelatedElementsList, ElementRetrieve, CorpusList, CorpusRetrieve,
ElementTranscriptions, ElementsCreate, ElementRegions, RegionDetails, RegionCreate, RegionBulkCreate,
ElementNeighbors, ElementParent, ElementParents, ElementChildren, ElementMetadata, ManageSelection
ElementsList, RelatedElementsList, ElementRetrieve, CorpusList, CorpusRetrieve, ElementTranscriptions,
ElementsCreate, ElementRegions, RegionDetails, RegionCreate, RegionBulkCreate, ElementNeighbors,
ElementParent, ElementParents, ElementChildren, ElementMetadata, MetadataDestroy, ManageSelection
)
from arkindex.documents.api.search import ElementSearch, EntitySearch
from arkindex.documents.api.ml import (
......@@ -111,6 +111,9 @@ api = [
path('transcription/<uuid:pk>/entity/', TranscriptionEntityCreate.as_view(), name='transcription-entity-create'),
path('transcription/<uuid:pk>/entities/', TranscriptionEntities.as_view(), name='transcription-entities'),
# Metadata management
path('metadata/<uuid:pk>/', MetadataDestroy.as_view(), name='metadata-destroy'),
# Git import workflows
path('imports/repos/', RepositoryList.as_view(), name='repository-list'),
path('imports/repos/<uuid:pk>/', RepositoryRetrieve.as_view(), name='repository-retrieve'),
......