Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Target project: arkindex/backend
Commits on Source (15)
Showing 314 additions and 95 deletions
......@@ -11,7 +11,7 @@ include:
# For jobs that run backend scripts directly
.backend-setup:
image: registry.gitlab.com/teklia/arkindex/backend/base:django-4.0.4
image: registry.gitlab.com/teklia/arkindex/backend/base:django-4.1.4
cache:
paths:
......
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.0.4 as build
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.1.4 as build
RUN mkdir build
ADD . build
RUN cd build && python3 setup.py sdist
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.0.4
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.1.4
ARG PONOS_BRANCH=master
ARG PONOS_ID=10017043
ARG TRANSKRIBUS_BRANCH=master
......
......@@ -60,7 +60,7 @@ RUN python -m nuitka \
arkindex/manage.py
# Start over from a clean setup
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.0.4 as build
FROM registry.gitlab.com/teklia/arkindex/backend/base:django-4.1.4 as build
# Import files from compilation
RUN mkdir /usr/share/arkindex
......
1.3.5
1.3.6-beta1
default_app_config = 'arkindex.documents.apps.DocumentsConfig'
......@@ -58,7 +58,7 @@ from arkindex.documents.serializers.elements import (
ElementTypeSerializer,
MetaDataBulkSerializer,
MetaDataCreateSerializer,
MetaDataUpdateSerializer,
MetaDataSerializer,
SelectionMoveSerializer,
SelectionParentCreateSerializer,
)
......@@ -1660,7 +1660,7 @@ class ElementMetadata(ListCreateAPIView):
)
else:
qs = element.metadatas.all()
return qs.select_related('entity')
return qs.select_related('entity__worker_run', 'worker_run')
def get_queryset(self):
if self.request and self.request.method == 'GET':
......@@ -1737,11 +1737,16 @@ class MetadataEdit(ACLMixin, RetrieveUpdateDestroyAPIView):
Edit an existing element metadata
"""
permission_classes = (IsVerified, )
serializer_class = MetaDataUpdateSerializer
serializer_class = MetaDataSerializer
def get_queryset(self):
# Filter readable metadata in order to check object permissions
return MetaData.objects.select_related('element__corpus').filter(element__corpus__in=Corpus.objects.readable(self.request.user))
return (
MetaData
.objects
.select_related('element__corpus', 'worker_run', 'entity__worker_run')
.filter(element__corpus__in=Corpus.objects.readable(self.request.user))
)
def check_object_permissions(self, request, obj):
super().check_object_permissions(request, obj)
......
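The extra select_related() joins above exist because the updated MetaDataSerializer nests a WorkerRunSummarySerializer for both the metadata itself and its entity; joining those relations up front keeps the endpoints at a constant query count, which the new assertNumQueries(6) tests below rely on. A minimal sketch of the difference, assuming plain Django ORM access patterns (the loop below is illustrative, not code from this project):

# Without the joins, serializing N metadata rows costs extra queries per row:
for md in MetaData.objects.filter(element=element):
    md.worker_run                        # one query per row
    md.entity and md.entity.worker_run   # up to two more per row

# With the joins, the related worker runs are fetched in the same query:
qs = (
    MetaData.objects
    .select_related('worker_run', 'entity__worker_run')
    .filter(element=element)
)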
......@@ -34,17 +34,6 @@ from arkindex.users.models import Role
from arkindex.users.utils import get_max_level
class MetaDataSerializer(MetaDataLightSerializer):
"""
Serialises some Metadata for any Element
"""
entity = BaseEntitySerializer(read_only=True)
class Meta:
model = MetaData
fields = MetaDataLightSerializer.Meta.fields + ('entity', )
def metadata_number_validator(data, serializer):
# These can be None if the input was invalid; DRF will catch that itself afterwards, so we ignore None.
meta_type = data.get('type', serializer.instance.type if serializer.instance else None)
......@@ -85,10 +74,8 @@ metadata_number_validator.requires_context = True
metadata_url_validator.requires_context = True
class MetaDataUpdateSerializer(MetaDataSerializer):
"""
Allow editing MetaData
"""
class MetaDataSerializer(MetaDataLightSerializer):
entity = BaseEntitySerializer(read_only=True, allow_null=True)
entity_id = serializers.PrimaryKeyRelatedField(
source='entity',
queryset=Entity.objects.none(),
......@@ -97,15 +84,23 @@ class MetaDataUpdateSerializer(MetaDataSerializer):
allow_null=True,
style={'base_template': 'input.html'},
)
worker_run = WorkerRunSummarySerializer(read_only=True, allow_null=True)
class Meta:
model = MetaData
fields = MetaDataSerializer.Meta.fields + ('entity_id', 'worker_version', 'worker_run_id', )
read_only_fields = ('worker_version', 'worker_run_id', )
class Meta(MetaDataLightSerializer.Meta):
fields = MetaDataLightSerializer.Meta.fields + (
'entity_id',
'entity',
'worker_version',
'worker_run',
)
read_only_fields = MetaDataLightSerializer.Meta.read_only_fields + (
'entity',
'worker_version',
'worker_run',
)
validators = [
metadata_number_validator,
metadata_url_validator,
WorkerRunOrVersionValidator(worker_version_field='worker_version', ),
]
def __init__(self, *args, **kwargs):
......@@ -127,7 +122,7 @@ class MetaDataUpdateSerializer(MetaDataSerializer):
return data
class MetaDataCreateSerializer(MetaDataUpdateSerializer):
class MetaDataCreateSerializer(MetaDataSerializer):
worker_version = serializers.PrimaryKeyRelatedField(
queryset=WorkerVersion.objects.all(),
required=False,
......@@ -137,13 +132,20 @@ class MetaDataCreateSerializer(MetaDataUpdateSerializer):
worker_run_id = serializers.PrimaryKeyRelatedField(
queryset=WorkerRun.objects.all(),
required=False,
write_only=True,
allow_null=True,
style={'base_template': 'input.html'},
source='worker_run'
)
class Meta(MetaDataUpdateSerializer.Meta):
class Meta(MetaDataSerializer.Meta):
fields = MetaDataSerializer.Meta.fields + (
'worker_run_id',
)
read_only_fields = ()
validators = MetaDataSerializer.Meta.validators + [
WorkerRunOrVersionValidator(worker_version_field='worker_version'),
]
class MetaDataBulkItemSerializer(MetaDataLightSerializer):
......
......@@ -105,9 +105,6 @@ class CorpusLightSerializer(serializers.ModelSerializer):
class MetaDataLightSerializer(serializers.ModelSerializer):
"""
Serialize a metadata without its entity
"""
type = EnumField(MetaType)
dates = InterpretedDateSerializer(many=True, source='get_dates', read_only=True)
value = MetaDataValueField(
......
......@@ -50,7 +50,7 @@ class TestMetaData(FixtureAPITestCase):
'dates': [],
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
},
])
......@@ -68,7 +68,7 @@ class TestMetaData(FixtureAPITestCase):
'dates': [],
'entity': None,
'worker_version': None,
'worker_run_id': None,
'worker_run': None,
},
])
......@@ -102,7 +102,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '123',
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
},
{
'id': str(entity_meta.id),
......@@ -121,7 +121,7 @@ class TestMetaData(FixtureAPITestCase):
'worker_run': None,
},
'worker_version': None,
'worker_run_id': None,
'worker_run': None,
},
])
......@@ -141,10 +141,36 @@ class TestMetaData(FixtureAPITestCase):
'value': '123',
'entity': None,
'worker_version': str(self.worker_version.id),
'worker_run_id': None
'worker_run': None
},
])
def test_list_with_worker_run(self):
self.metadata.worker_version = self.worker_version
self.metadata.worker_run = self.worker_run
self.metadata.save()
self.client.force_login(self.user)
with self.assertNumQueries(6):
response = self.client.get(reverse('api:element-metadata', kwargs={'pk': str(self.vol.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertListEqual(response.json(), [
{
'id': str(self.metadata.id),
'name': 'folio',
'type': 'text',
'value': '123',
'entity': None,
'dates': [],
'worker_version': str(self.worker_version.id),
'worker_run': {
'id': str(self.worker_run.id),
'summary': 'Worker Document layout analyser @ 418bd4',
}
}
])
def test_list_wrong_acl(self):
self.vol.corpus = self.private_corpus
self.vol.save()
......@@ -173,7 +199,7 @@ class TestMetaData(FixtureAPITestCase):
'name': 'folio',
'type': 'text',
'value': '1r',
'worker_run_id': None,
'worker_run': None,
'worker_version': None,
}])
......@@ -207,7 +233,7 @@ class TestMetaData(FixtureAPITestCase):
'dates': [],
'entity': None,
'worker_version': None,
'worker_run_id': None,
'worker_run': None,
},
{
'dates': [],
......@@ -217,7 +243,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '1r',
'entity': None,
'worker_version': None,
'worker_run_id': None,
'worker_run': None,
},
{
'dates': [],
......@@ -226,7 +252,7 @@ class TestMetaData(FixtureAPITestCase):
'name': 'weigth',
'type': 'numeric',
'value': 1337.0,
'worker_run_id': None,
'worker_run': None,
'worker_version': None,
},
],
......@@ -297,6 +323,19 @@ class TestMetaData(FixtureAPITestCase):
self.assertEqual(md.value, 'Texas')
self.assertEqual(md.worker_run_id, self.worker_run.id)
self.assertEqual(md.worker_version, self.worker_version)
self.assertDictEqual(response.json(), {
'id': str(md.id),
'name': 'location',
'type': 'location',
'value': 'Texas',
'entity': None,
'dates': [],
'worker_version': str(self.worker_version.id),
'worker_run': {
'id': str(self.worker_run.id),
'summary': 'Worker Document layout analyser @ 418bd4',
}
})
def test_create_metadata_worker_run_or_version(self):
self.client.force_login(self.user)
......@@ -339,7 +378,7 @@ class TestMetaData(FixtureAPITestCase):
'year': 1885
}],
'worker_version': None,
'worker_run_id': None
'worker_run': None
})
def test_create_metadata_empty(self):
......@@ -508,7 +547,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '2019-12-04',
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
})
self.metadata.refresh_from_db()
self.assertEqual(self.metadata.type, MetaType.Date)
......@@ -641,7 +680,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '2019-12-04',
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
})
self.metadata.refresh_from_db()
self.assertEqual(self.metadata.type, MetaType.Date)
......@@ -667,7 +706,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '123',
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
})
self.metadata.refresh_from_db()
self.assertEqual(self.metadata.type, MetaType.Date)
......@@ -692,7 +731,7 @@ class TestMetaData(FixtureAPITestCase):
'value': '2019-12-04',
'entity': None,
'worker_version': None,
'worker_run_id': None
'worker_run': None
})
self.metadata.refresh_from_db()
self.assertEqual(self.metadata.type, MetaType.Text)
......@@ -831,8 +870,11 @@ class TestMetaData(FixtureAPITestCase):
def test_get_metadata(self):
self.client.force_login(self.user)
response = self.client.get(reverse('api:metadata-edit', kwargs={'pk': str(self.metadata.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
with self.assertNumQueries(6):
response = self.client.get(reverse('api:metadata-edit', kwargs={'pk': str(self.metadata.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.json(),
{
......@@ -843,7 +885,7 @@ class TestMetaData(FixtureAPITestCase):
'entity': None,
'dates': [],
'worker_version': None,
'worker_run_id': None
'worker_run': None
}
)
......@@ -854,8 +896,11 @@ class TestMetaData(FixtureAPITestCase):
self.metadata.type = MetaType.Numeric
self.metadata.save()
self.client.force_login(self.user)
response = self.client.get(reverse('api:metadata-edit', kwargs={'pk': str(self.metadata.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
with self.assertNumQueries(6):
response = self.client.get(reverse('api:metadata-edit', kwargs={'pk': str(self.metadata.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.json(),
{
......@@ -866,10 +911,34 @@ class TestMetaData(FixtureAPITestCase):
'entity': None,
'dates': [],
'worker_version': None,
'worker_run_id': None
'worker_run': None
}
)
def test_get_metadata_worker_run(self):
self.metadata.worker_version = self.worker_version
self.metadata.worker_run = self.worker_run
self.metadata.save()
self.client.force_login(self.user)
with self.assertNumQueries(6):
response = self.client.get(reverse('api:metadata-edit', kwargs={'pk': str(self.metadata.id)}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(response.json(), {
'id': str(self.metadata.id),
'name': 'folio',
'type': 'text',
'value': '123',
'entity': None,
'dates': [],
'worker_version': str(self.worker_version.id),
'worker_run': {
'id': str(self.worker_run.id),
'summary': 'Worker Document layout analyser @ 418bd4',
}
})
def test_create_metadata_entity(self):
self.client.force_login(self.superuser)
entity = self.corpus.entities.create(
......@@ -961,7 +1030,7 @@ class TestMetaData(FixtureAPITestCase):
'entity': None,
'dates': [],
'worker_version': None,
'worker_run_id': None
'worker_run': None
}
)
......@@ -980,7 +1049,7 @@ class TestMetaData(FixtureAPITestCase):
'entity': None,
'dates': [],
'worker_version': None,
'worker_run_id': None
'worker_run': None
}
)
......@@ -999,7 +1068,7 @@ class TestMetaData(FixtureAPITestCase):
'entity': None,
'dates': [],
'worker_version': None,
'worker_run_id': None
'worker_run': None
}
)
......
......@@ -190,12 +190,10 @@ class ImagePathDefault(object):
Required because the implicit UniqueTogetherValidator in the Image serializer requires
both server and path to either be required or have a default value.
"""
requires_context = True
def set_context(self, serializer_field):
self.serializer = serializer_field.parent
def __call__(self):
return str(self.serializer.fields['id'].default)
def __call__(self, serializer_field):
return str(serializer_field.parent.fields['id'].default)
class ImageUploadSerializer(ImageSerializer):
......
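The ImagePathDefault rewrite above moves from the removed set_context() hook to DRF's context-aware callable defaults: a default whose class sets requires_context = True is called with the bound serializer field, the same mechanism used by the built-in serializers.CurrentUserDefault. A minimal sketch of the pattern, with an illustrative serializer that is not part of this diff:

import uuid

from rest_framework import serializers


class ParentIdDefault:
    # DRF passes the bound field to __call__ because requires_context is True,
    # so the parent serializer and its other fields are reachable here.
    requires_context = True

    def __call__(self, serializer_field):
        return str(serializer_field.parent.fields['id'].default)


class ExampleSerializer(serializers.Serializer):
    id = serializers.UUIDField(default=uuid.uuid4())   # fixed value, for the sketch only
    path = serializers.CharField(default=ParentIdDefault())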
default_app_config = 'arkindex.process.apps.ProcessConfig'
......@@ -22,7 +22,7 @@ class ActivityManager(models.Manager):
assert isinstance(state, WorkerActivityState), 'State should be an instance of WorkerActivityState'
sql, params = elements_qs.values('id').query.sql_with_params()
sql, params = elements_qs.distinct().values('id').query.sql_with_params()
select_params = (worker_version_id, configuration_id, state.value, process_id) + params
# With ON CONFLICT, the target constraint is only optional when the action is DO NOTHING.
......
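For context on the one-line change above: bulk_insert compiles the element queryset into SQL with sql_with_params() and splices it into a raw INSERT … SELECT … ON CONFLICT statement, so duplicate rows produced by a filter on element paths have to be removed with .distinct() before compilation (see test_bulk_insert_activity_duplicate_elements below). A rough sketch of that pattern under assumed table and column names; this is an illustration, not the actual Arkindex query:

from django.db import connection

def bulk_insert_sketch(worker_version_id, process_id, configuration_id, elements_qs, state):
    # Deduplicate, then compile the queryset into a subquery usable in raw SQL.
    sql, params = elements_qs.distinct().values('id').query.sql_with_params()
    select_params = (worker_version_id, configuration_id, state.value, process_id) + params
    with connection.cursor() as cursor:
        cursor.execute(
            # Assumed table and column names. ON CONFLICT may omit its conflict
            # target only when the action is DO NOTHING.
            'INSERT INTO process_workeractivity '
            '(element_id, worker_version_id, configuration_id, state, process_id) '
            f'SELECT element.id, %s, %s, %s, %s FROM ({sql}) AS element '
            'ON CONFLICT DO NOTHING',
            select_params,
        )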
......@@ -2,7 +2,14 @@ from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from arkindex.documents.models import Corpus, Element
from arkindex.process.models import Process, ProcessMode, WorkerConfiguration, WorkerVersion, WorkerVersionGPUUsage
from arkindex.process.models import (
Process,
ProcessMode,
WorkerConfiguration,
WorkerVersion,
WorkerVersionGPUUsage,
WorkerVersionState,
)
from arkindex.project.mixins import TrainingModelMixin, WorkerACLMixin
from arkindex.training.models import Model, ModelVersion
......@@ -139,6 +146,9 @@ class StartTrainingSerializer(serializers.ModelSerializer, WorkerACLMixin, Train
):
raise ValidationError({'__all__': ['Train, validation and test folders must be different.']})
if data['worker_version_id'].state != WorkerVersionState.Available:
raise ValidationError({'worker_version_id': ['This worker version is not available.']})
# Ensure the worker version is compatible with the GPU mode
if data['worker_version_id'].gpu_usage == WorkerVersionGPUUsage.Disabled and data["use_gpu"]:
raise ValidationError({'__all__': ['This worker version does not support GPU mode.']})
......
......@@ -348,8 +348,18 @@ class WorkerConfigurationSerializer(WorkerConfigurationListSerializer):
configuration = serializers.DictField(allow_empty=False, read_only=True)
class Meta(WorkerConfigurationListSerializer.Meta):
# Only allow updating `archived`
read_only_fields = ('id', 'name', 'configuration')
# The configuration cannot be updated
read_only_fields = ('id', 'configuration')
def validate(self, data):
name = data.get('name')
data_archived = data.get('archived')
instance_archived = self.instance.archived
# Archived configurations cannot be renamed, but un-archiving a configuration and renaming it at the same time is
# possible. It's also possible to archive and rename a configuration simultaneously.
if instance_archived and data_archived is not False and name:
raise ValidationError({'name': 'Archived configurations cannot be renamed.'})
return data
class WorkerConfigurationExistsErrorSerializer(serializers.Serializer):
......
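The rule enforced by the new validate() above, summarised with the payloads used in the tests further down (PUT or PATCH on api:configuration-retrieve):

# Instance currently archived:
{'name': 'new name'}                      # 400 — Archived configurations cannot be renamed.
{'name': 'new name', 'archived': True}    # 400 — the configuration stays archived, so the rename is refused
{'name': 'new name', 'archived': False}   # 200 — un-archive and rename in a single request
# Instance currently active:
{'name': 'new name', 'archived': True}    # 200 — archive and rename simultaneously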
......@@ -13,6 +13,7 @@ from arkindex.process.models import (
WorkerRun,
WorkerVersion,
WorkerVersionGPUUsage,
WorkerVersionState,
)
from arkindex.project.tests import FixtureTestCase
from arkindex.training.models import Model, ModelVersion, ModelVersionState
......@@ -197,6 +198,23 @@ class TestCreateTrainingProcess(FixtureTestCase):
'__all__': ['Train, validation and test folders must be different.'],
})
def test_worker_version_not_available(self):
self.client.force_login(self.user)
# Every state other than Available should cause this error
states = set(WorkerVersionState) - {WorkerVersionState.Available}
for state in states:
with self.subTest(state=state):
self.training_worker_version.state = state
self.training_worker_version.save()
with self.assertNumQueries(11):
response = self.client.post(reverse('api:process-training'), self.base_payload)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.json(), {
'worker_version_id': ['This worker version is not available.']
})
def test_worker_version_gpu_required(self):
"""
An error is raised in case the GPU mode is incompatible with the worker
......
......@@ -1498,6 +1498,7 @@ class TestImports(FixtureAPITestCase):
self.assertEqual(activities_delay_mock.call_count, 1)
@override_settings(IMPORTS_WORKER_VERSION=uuid.uuid4())
@override_settings(ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks')
@override_settings(PONOS_RECIPE={'env': {'ARKINDEX_API_TOKEN': 'testToken'}})
def test_pdf_import_build_workflow_recipe(self):
"""
......@@ -1521,12 +1522,13 @@ class TestImports(FixtureAPITestCase):
'tasks': {
'import': {
'command': f'python -m arkindex_tasks.import_files {process.id}',
'image': 'registry.gitlab.com/teklia/arkindex/tasks'
'image': 'registry.teklia.com/tasks'
}
}
})
@override_settings(PONOS_RECIPE={'env': {'ARKINDEX_API_TOKEN': 'testToken'}})
@override_settings(ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks')
@override_settings(PUBLIC_HOSTNAME='https://arkindex.localhost')
def test_worker_run_model_version_build_workflow_recipe(self):
"""
......@@ -1553,7 +1555,7 @@ class TestImports(FixtureAPITestCase):
[
{
'command': f'python -m arkindex_tasks.init_elements {process.id} --chunks-number 1',
'image': 'registry.gitlab.com/teklia/arkindex/tasks'
'image': 'registry.teklia.com/tasks'
},
{
'image': run.version.docker_image_iid or run.version.docker_image_name,
......
......@@ -94,11 +94,13 @@ class TestTranskribusImport(FixtureAPITestCase):
@override_settings(
PONOS_RECIPE={},
ARKINDEX_FEATURES={"transkribus": True},
TRANSKRIBUS_EMAIL="arkindex@teklia.com",
TRANSKRIBUS_PASSWORD="averysecretpassword",
ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks',
)
@patch("transkribus.TranskribusAPI.list_user_collection")
@patch("arkindex.process.serializers.imports.TranskribusAPI")
def test_create_import(self, mock_transkribus):
mock_transkribus.return_value = [{"email": "nope@nope.fr"}, {"email": "arkindex@teklia.com"}]
mock_transkribus.return_value.list_user_collection.return_value = [{"email": "nope@nope.fr"}, {"email": "arkindex@teklia.com"}]
self.client.force_login(self.user)
transkribus_worker_version_id = None
with self.settings(TRANSKRIBUS_WORKER_VERSION=self.transkribus_worker_version.id):
......@@ -136,23 +138,23 @@ class TestTranskribusImport(FixtureAPITestCase):
'env': {
'ARKINDEX_PROCESS_ID': str(process.id),
'ARKINDEX_CORPUS_ID': str(corpus.id),
'TRANSKRIBUS_EMAIL': settings.TRANSKRIBUS_EMAIL,
'TRANSKRIBUS_PASSWORD': settings.TRANSKRIBUS_PASSWORD,
'TRANSKRIBUS_EMAIL': "arkindex@teklia.com",
'TRANSKRIBUS_PASSWORD': "averysecretpassword",
'TRANSKRIBUS_WORKER_VERSION': transkribus_worker_version_id
},
'tasks': {
'export_transkribus': {
'command': 'python -m arkindex_tasks.export_transkribus 12345',
'image': 'registry.gitlab.com/teklia/arkindex/tasks',
'image': 'registry.teklia.com/tasks',
},
'import_arkindex': {
'command': f'python -m arkindex_tasks.import_transkribus --job-path /data/export_transkribus/transkribus_export_job.json --corpus {corpus.id}',
'image': 'registry.gitlab.com/teklia/arkindex/tasks',
'image': 'registry.teklia.com/tasks',
'parents': ['export_transkribus']
},
'thumbnails': {
'command': 'python3 -m arkindex_tasks.generate_thumbnails /data/import_arkindex/elements.json',
'image': 'registry.gitlab.com/teklia/arkindex/tasks',
'image': 'registry.teklia.com/tasks',
'parents': ['import_arkindex']
}
}
......
......@@ -448,12 +448,12 @@ class TestWorkerConfigurations(FixtureAPITestCase):
with self.assertNumQueries(7):
response = self.client.put(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={'archived': True}
data={'archived': True, 'name': 'new name'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertEqual(self.worker_config.name, 'new name')
self.assertDictEqual(self.worker_config.configuration, {'key': 'value'})
self.assertTrue(self.worker_config.archived)
......@@ -471,12 +471,12 @@ class TestWorkerConfigurations(FixtureAPITestCase):
with self.assertNumQueries(6):
response = self.client.put(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={'archived': True}
data={'archived': True, 'name': 'new name'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertEqual(self.worker_config.name, 'new name')
self.assertDictEqual(self.worker_config.configuration, {'key': 'value'})
self.assertTrue(self.worker_config.archived)
......@@ -493,12 +493,12 @@ class TestWorkerConfigurations(FixtureAPITestCase):
with self.assertNumQueries(4):
response = self.client.put(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={'archived': True}
data={'archived': True, 'name': 'a new name'}
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertEqual(self.worker_config.name, 'a new name')
self.assertDictEqual(self.worker_config.configuration, {'key': 'value'})
self.assertTrue(self.worker_config.archived)
......@@ -527,11 +527,63 @@ class TestWorkerConfigurations(FixtureAPITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertEqual(self.worker_config.name, 'new name')
self.assertTrue(self.worker_config.archived)
# The configuration was not updated
self.assertDictEqual(self.worker_config.configuration, {'key': 'value'})
# Only the archived state was updated
def test_update_archived_configuration_name(self):
"""
Archived configurations cannot be renamed with an update
"""
self.client.force_login(self.user)
self.worker_config.archived = True
self.worker_config.save()
self.assertTrue(self.worker_config.archived)
with self.assertNumQueries(8):
response = self.client.put(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={
'name': 'new name',
'archived': True,
},
format='json',
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.json(), {
'name': ['Archived configurations cannot be renamed.'],
})
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertTrue(self.worker_config.archived)
def test_update_archived_configuration_name_unarchive(self):
"""
Archived configurations can be renamed if they are also unarchived
"""
self.client.force_login(self.user)
self.worker_config.archived = True
self.worker_config.save()
self.assertTrue(self.worker_config.archived)
with self.assertNumQueries(9):
response = self.client.put(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={
'name': 'new name',
'archived': False,
},
format='json',
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'new name')
self.assertFalse(self.worker_config.archived)
def test_partial_update_requires_login(self):
with self.assertNumQueries(0):
response = self.client.patch(
......@@ -592,7 +644,7 @@ class TestWorkerConfigurations(FixtureAPITestCase):
def test_partial_update_contributor_repository(self):
"""
Can update a configuration with contributor rights on the repository
Can partially update a configuration with contributor rights on the repository
"""
self.worker_1.memberships.all().delete()
self.worker_1.repository.memberships.all().delete()
......@@ -615,7 +667,7 @@ class TestWorkerConfigurations(FixtureAPITestCase):
def test_partial_update_contributor_worker(self):
"""
Can update a configuration with contributor rights on the worker
Can partially update a configuration with contributor rights on the worker
"""
self.worker_1.memberships.all().delete()
self.worker_1.repository.memberships.all().delete()
......@@ -638,7 +690,7 @@ class TestWorkerConfigurations(FixtureAPITestCase):
def test_partial_update_admin(self):
"""
Admins can update any configuration
Admins can partially update any configuration
"""
self.worker_1.memberships.all().delete()
self.worker_1.repository.memberships.all().delete()
......@@ -660,7 +712,7 @@ class TestWorkerConfigurations(FixtureAPITestCase):
def test_partial_update_ignored_fields(self):
"""
Fields that should not be editable are ignored when sent in update requests
Fields that should not be editable are ignored when sent in partial update requests
"""
# Get as many rights as possible so that this test does not fail if something goes wrong with the endpoint's
# permissions; we do not care about rights in this test.
......@@ -683,7 +735,34 @@ class TestWorkerConfigurations(FixtureAPITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertEqual(self.worker_config.name, 'new name')
self.assertTrue(self.worker_config.archived)
# The configuration was not updated
self.assertDictEqual(self.worker_config.configuration, {'key': 'value'})
# Only the archived state was updated
def test_partial_update_archived_configuration_name(self):
"""
Archived configurations cannot be renamed with a partial update
"""
self.client.force_login(self.user)
self.worker_config.archived = True
self.worker_config.save()
self.assertTrue(self.worker_config.archived)
with self.assertNumQueries(8):
response = self.client.patch(
reverse('api:configuration-retrieve', kwargs={'pk': str(self.worker_config.id)}),
data={
'name': 'new name',
},
format='json',
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.json(), {
'name': ['Archived configurations cannot be renamed.'],
})
self.worker_config.refresh_from_db()
self.assertEqual(self.worker_config.name, 'config time')
self.assertTrue(self.worker_config.archived)
......@@ -64,6 +64,32 @@ class TestWorkerActivity(FixtureTestCase):
self.assertEqual(elements_qs.count(), 5)
self.assertEqual(WorkerActivity.objects.filter(state=WorkerActivityState.Started, process=self.process).count(), 5)
def test_bulk_insert_activity_duplicate_elements(self):
"""
WorkerActivity.bulk_insert should exclude duplicate elements
"""
element_type = self.corpus.types.first()
parent1 = self.corpus.elements.create(type=element_type, name='Parent 1')
parent2 = self.corpus.elements.create(type=element_type, name='Parent 2')
element = self.corpus.elements.create(type=element_type, name='Element')
child = self.corpus.elements.create(type=element_type, name='Child')
element.add_parent(parent1)
element.add_parent(parent2)
child.add_parent(element)
elements_qs = Element.objects.filter(paths__path__contains=[element.id], name='Child')
# `child` has two paths that both contain the ID of `element`, because `element` has two parents,
# so filtering on paths__path will duplicate the child
self.assertEqual(elements_qs.count(), 2)
WorkerActivity.objects.bulk_insert(
self.worker_version.id,
self.process.id,
self.configuration.id,
elements_qs,
state=WorkerActivityState.Started,
)
self.assertEqual(self.process.activities.filter(state=WorkerActivityState.Started).get().element_id, child.id)
def test_bulk_insert_activity_children(self):
"""
Bulk insert worker activities for acts
......
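The duplication guarded against by the new test above comes from the implicit join on element paths: filtering on paths__path joins Element against ElementPath, one row per path, so an element reachable through two parents matches twice. Illustrated with the objects built in the test:

# `child` has two ElementPath rows, one through each parent of `element`,
# so the join yields the same Element twice unless the queryset is deduplicated.
qs = Element.objects.filter(paths__path__contains=[element.id], name='Child')
qs.count()             # 2
qs.distinct().count()  # 1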
......@@ -502,6 +502,7 @@ class TestWorkflows(FixtureAPITestCase):
)
@patch('arkindex.project.triggers.process_tasks.initialize_activity.delay')
@override_settings(ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks')
def test_workers_multiple_worker_runs(self, activities_delay_mock):
process_2 = self.corpus.processes.create(
activity_state=ActivityState.Pending,
......@@ -543,7 +544,7 @@ class TestWorkflows(FixtureAPITestCase):
{
'command': 'python -m arkindex_tasks.init_elements '
f'{process_2.id} --chunks-number 1',
'image': 'registry.gitlab.com/teklia/arkindex/tasks'
'image': 'registry.teklia.com/tasks'
},
f'reco_{str(self.version_1.id)[0:6]}':
{
......@@ -597,6 +598,7 @@ class TestWorkflows(FixtureAPITestCase):
# Check that the corpus worker version cache has been updated
self.assertCountEqual(self.corpus.worker_versions.all(), [self.version_1, self.version_2])
@override_settings(ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks')
def test_create_process_use_cache_option(self):
"""
A process with the `use_cache` parameter creates an initialization task with the --use-cache flag
......@@ -624,7 +626,7 @@ class TestWorkflows(FixtureAPITestCase):
'command': 'python -m arkindex_tasks.init_elements '
f'{process_2.id} --chunks-number 1 '
'--use-cache',
'image': 'registry.gitlab.com/teklia/arkindex/tasks'
'image': 'registry.teklia.com/tasks'
},
f'reco_{str(self.version_1.id)[0:6]}':
{
......@@ -641,6 +643,7 @@ class TestWorkflows(FixtureAPITestCase):
},
})
@override_settings(ARKINDEX_TASKS_IMAGE='registry.teklia.com/tasks')
def test_create_process_use_gpu_option(self):
"""
A process with the `use_gpu` parameter enables the `requires_gpu` attribute of tasks that need one
......@@ -666,7 +669,7 @@ class TestWorkflows(FixtureAPITestCase):
{
'command': 'python -m arkindex_tasks.init_elements '
f'{process_2.id} --chunks-number 1',
'image': 'registry.gitlab.com/teklia/arkindex/tasks'
'image': 'registry.teklia.com/tasks'
},
f'worker-gpu_{str(self.version_3.id)[0:6]}':
{
......