Compare revisions: arkindex/backend

Changes are shown as if the source revision was being merged into the target revision.
Commits on Source (38)
Showing 146 additions and 113 deletions
# syntax=docker/dockerfile:1
FROM registry.gitlab.teklia.com/arkindex/backend/base:gitlab-teklia as build
RUN mkdir build
......@@ -41,7 +42,10 @@ RUN chown -R ark:teklia /backend_static
# Copy Version file
COPY VERSION /etc/arkindex.version
# Run with Daphne
HEALTHCHECK --start-period=1m --start-interval=1s --interval=1m --timeout=5s \
CMD wget --spider --quiet http://localhost/api/v1/public-key/ || exit 1
# Run with Gunicorn
ENV PORT 80
EXPOSE 80
CMD ["manage.py", "gunicorn", "--host=0.0.0.0"]
# syntax=docker/dockerfile:1
FROM python:3.10-slim-bookworm AS compilation
RUN apt-get update && apt-get install --no-install-recommends -y build-essential wget
......@@ -87,6 +88,9 @@ COPY arkindex/documents/export/*.sql /usr/share/arkindex/documents/export/
# Otherwise Django will not load the compiled module
RUN for cmd in $(cat /usr/share/arkindex/commands.txt); do mkdir -p $(dirname $cmd); touch $cmd; done
HEALTHCHECK --start-period=1m --start-interval=1s --interval=1m --timeout=5s \
CMD wget --spider --quiet http://localhost/api/v1/public-key/ || exit 1
# Run gunicorn server
ENV PORT=80
EXPOSE 80
......
1.5.1-beta1
1.5.1
......@@ -18,22 +18,24 @@ from arkindex.documents.models import (
MLClass,
Transcription,
)
from arkindex.users.admin import GroupMembershipInline, UserMembershipInline
class ElementTypeInline(admin.TabularInline):
model = ElementType
prepopulated_fields = {'slug': ('display_name', )}
fields = ('slug', 'display_name', 'folder', 'indexable')
readonly_fields = ('slug', 'display_name', 'folder')
def has_add_permission(self, request, obj=None):
return False
class CorpusExportInline(admin.TabularInline):
model = CorpusExport
def has_delete_permission(self, request, obj=None):
return False
class CorpusAdmin(admin.ModelAdmin):
list_display = ('id', 'name', 'public', 'top_level_type', 'created')
search_fields = ('name', )
inlines = (ElementTypeInline, UserMembershipInline, GroupMembershipInline, CorpusExportInline)
inlines = (ElementTypeInline, )
ordering = ('-created', )
def has_delete_permission(self, request, obj=None):
......
......@@ -1273,13 +1273,15 @@ class ElementNeighbors(ACLMixin, ListAPIView):
Requires **read** access to the element's corpus.
"""
serializer_class = ElementNeighborsSerializer
pagination_class = None
# For OpenAPI type discovery
queryset = Element.objects.none()
def get_queryset(self):
element = get_object_or_404(
Element.objects.select_related('corpus').only('id', 'corpus__public'),
# Include the attributes required for ACL checks and the API response
Element.objects.select_related('corpus', 'type').only('id', 'name', 'type__slug', 'corpus__public'),
id=self.kwargs['pk']
)
......@@ -2255,7 +2257,7 @@ class WorkerResultsDestroy(CorpusACLMixin, DestroyAPIView):
errors['model_version_id'].append('Invalid UUID.')
else:
try:
model_version = ModelVersion.objects.get(id=model_version_id)
model_version = ModelVersion.objects.select_related('model').get(id=model_version_id)
except ModelVersion.DoesNotExist:
errors['model_version_id'].append('This model version does not exist.')
......
from datetime import datetime, timedelta, timezone
from datetime import timedelta
from textwrap import dedent
from django.conf import settings
from django.utils import timezone
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import serializers, status
from rest_framework.exceptions import ValidationError
......@@ -12,9 +15,6 @@ from arkindex.project.mixins import CorpusACLMixin
from arkindex.project.permissions import IsVerified
from arkindex.users.models import Role
# Delay to generate a new export from a specific user
EXPORT_DELAY_HOURS = 6
@extend_schema(tags=['exports'])
@extend_schema_view(
......@@ -28,10 +28,15 @@ EXPORT_DELAY_HOURS = 6
post=extend_schema(
operation_id='StartExport',
request=None,
description=(
'Start a corpus export job.\n'
f'A user must wait {EXPORT_DELAY_HOURS} hours before being able to generate a new export of the same corpus.\n\n'
'Contributor access is required.'
description=dedent(
f"""
Start a corpus export job.
A user must wait for {settings.EXPORT_TTL_SECONDS} seconds after the last successful export
before being able to generate a new export of the same corpus.
Contributor access is required.
"""
),
)
)
......@@ -55,10 +60,10 @@ class CorpusExportAPIView(CorpusACLMixin, ListCreateAPIView):
available_exports = corpus.exports.filter(
state=CorpusExportState.Done,
created__gte=datetime.now(timezone.utc) - timedelta(hours=EXPORT_DELAY_HOURS)
created__gte=timezone.now() - timedelta(seconds=settings.EXPORT_TTL_SECONDS)
)
if available_exports.exists():
raise ValidationError(f'An export has already been made for this corpus in the last {EXPORT_DELAY_HOURS} hours.')
raise ValidationError(f'An export has already been made for this corpus in the last {settings.EXPORT_TTL_SECONDS} seconds.')
export = corpus.exports.create(user=self.request.user)
export.start()
......
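The throttle that used to be the module constant EXPORT_DELAY_HOURS is now driven by a Django setting expressed in seconds. A hedged sketch of the new check, reusing the names from the diff (the CorpusExportState import path is an assumption):

# Hedged sketch of the TTL check behind StartExport.
from datetime import timedelta

from django.conf import settings
from django.utils import timezone

from arkindex.documents.models import CorpusExportState  # assumed import path

def recent_export_exists(corpus):
    # A finished export created within the TTL window blocks a new one;
    # the view raises a ValidationError in that case.
    cutoff = timezone.now() - timedelta(seconds=settings.EXPORT_TTL_SECONDS)
    return corpus.exports.filter(
        state=CorpusExportState.Done,
        created__gte=cutoff,
    ).exists()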
......@@ -3927,7 +3927,6 @@
"slug": "docker_build",
"priority": 10,
"state": "completed",
"tags": "[]",
"image": "",
"shm_size": null,
"command": null,
......
......@@ -111,22 +111,24 @@ class Command(BaseCommand):
.filter(max_expiry__lt=timezone.now()) \
.exclude(id__in=worker_version_docker_image_processes)
tasks = Task.objects.filter(process__in=expired_processes)
artifacts = Artifact.objects.filter(task__process__in=expired_processes)
# A Ponos task can be linked to a Dataset when it provides the artifacts for a Dataset in a Complete state.
# As Datasets are meant to be stable, we do not want to destroy these tasks and artifacts.
tasks = Task.objects.filter(process__in=expired_processes, dataset__isnull=True)
artifacts = Artifact.objects.filter(task__in=tasks)
self.stdout.write(f'Removing {artifacts.count()} artifacts of expired processes from S3…')
for artifact in artifacts.select_related('task').iterator():
self.stdout.write(f'Removing artifact {artifact.s3.key}')
self.stdout.write(f'Removing artifact {artifact.s3_object.key}')
try:
artifact.s3.delete()
artifact.s3_delete()
except ClientError as e:
self.stdout.write(self.style.ERROR(str(e)))
self.stdout.write(f'Removing logs for {tasks.count()} tasks of expired processes from S3…')
for task in tasks.iterator():
self.stdout.write(f'Removing task log {task.s3_logs.key}')
self.stdout.write(f'Removing task log {task.logs.s3_object.key}')
try:
task.s3_logs.delete()
task.logs.s3_delete()
except ClientError as e:
self.stdout.write(self.style.ERROR(str(e)))
......
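The key change in the cleanup command is the `dataset__isnull=True` filter: expired tasks that feed a Dataset survive, and only the artifacts of the remaining tasks are collected. A hedged sketch of that selection, using the model names from the diff:

# Hedged sketch: pick only the expired tasks that are safe to destroy.
from arkindex.ponos.models import Artifact, Task

def deletable_objects(expired_processes):
    # Tasks linked to a Dataset are skipped, because Complete datasets
    # depend on their artifacts remaining available.
    tasks = Task.objects.filter(process__in=expired_processes, dataset__isnull=True)
    artifacts = Artifact.objects.filter(task__in=tasks)
    return tasks, artifacts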
......@@ -193,47 +193,27 @@ class ElementManager(models.Manager):
def get_neighbors(self, element):
"""
Returns a list of neighboring ElementPaths for an element, with a prefetched `element` attribute
and a list of prefetched parent elements in the `parents` attribute.
Returns a list of neighboring ElementPaths for an element, with overridden attributes:
- `ElementPath.path` is an array of all elements in the path, instead of element IDs.
- `ElementPath.previous` is the element that precedes this one in the same parent, or None if there is none.
- `ElementPath.next` is the element that succeeds this one in the same parent, or None if there is none.
"""
paths = list(self.get_neighbor_paths(element))
# Build a set of all IDs to load related elements (neighbors, parents) then load them into a dict
related_elt_ids = set(chain(
(element.id,),
related_elt_ids = set(filter(None, chain(
*((path.previous, path.next) for path in paths),
*(path.path for path in paths),
))
elements = {
elt.id: elt
for elt in (
self.filter(id__in=filter(None, related_elt_ids))
.select_related('type')
.only('id', 'type__slug', 'name')
)
}
)))
elements = self.select_related('type').only('id', 'type__slug', 'name').in_bulk(related_elt_ids)
# Generate an output corresponding to endpoint expectations (compatibility purpose)
output = []
for path in paths:
if path.previous:
output.append({
'ordering': path.previous_ord,
'element': elements.get(path.previous),
'parents': list(map(elements.get, path.path)),
})
output.append({
'ordering': path.ordering,
'element': elements.get(element.id),
'parents': list(map(elements.get, path.path)),
})
if path.next:
output.append({
'ordering': path.next_ord,
'element': elements.get(path.next),
'parents': list(map(elements.get, path.path)),
})
return output
path.element = element
path.previous = elements.get(path.previous)
path.next = elements.get(path.next)
path.path = list(map(elements.get, path.path))
return paths
class CorpusManager(models.Manager):
......
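The rewrite above leans on QuerySet.in_bulk, which turns a set of primary keys into a single query returning an {id: instance} dict; the None entries for missing neighbors are filtered out of the ID set first, and dict.get then maps any leftover misses back to None. A minimal illustration (neighbor_ids and some_path are hypothetical):

# Hedged sketch of the in_bulk pattern used by get_neighbors.
neighbor_ids = set(filter(None, [previous_id, next_id, parent_id]))  # drop Nones
elements = (
    Element.objects            # the manager itself (self) in the real code
    .select_related('type')
    .only('id', 'type__slug', 'name')
    .in_bulk(neighbor_ids)     # one query -> {uuid: Element}
)
path_elements = [elements.get(i) for i in some_path]  # misses map to None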
......@@ -13,6 +13,7 @@ from django.core.validators import MaxValueValidator, MinValueValidator, RegexVa
from django.db import connections, models, transaction
from django.db.models import Deferrable, Q
from django.db.models.functions import Cast, Least
from django.urls import reverse
from django.utils.functional import cached_property
from enumfields import Enum, EnumField
......@@ -59,6 +60,12 @@ class Corpus(IndexableModel):
def __str__(self):
return self.name
def get_absolute_url(self):
return urljoin(
settings.PUBLIC_HOSTNAME,
reverse('frontend-corpus-details', kwargs={'pk': self.id}),
)
def create_default_types(self):
self.types.bulk_create(
ElementType(corpus=self, **values)
......
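get_absolute_url joins the configured public hostname with the reversed frontend route. A hedged usage sketch; the hostname value and the path that 'frontend-corpus-details' resolves to are assumptions, not taken from the repository:

# Hedged usage sketch only.
# With settings.PUBLIC_HOSTNAME = "https://arkindex.example.com" and the
# URLconf route named "frontend-corpus-details", the method yields e.g.:
corpus.get_absolute_url()
# -> "https://arkindex.example.com/corpus/<uuid>"  (exact path set by the URLconf)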
......@@ -23,7 +23,7 @@ from arkindex.documents.serializers.light import (
from arkindex.documents.serializers.ml import ClassificationSerializer, WorkerRunSummarySerializer
from arkindex.images.models import Image
from arkindex.images.serializers import ZoneSerializer
from arkindex.ponos.models import Task
from arkindex.ponos.utils import get_process_from_task_auth
from arkindex.process.models import WorkerVersion
from arkindex.project.fields import Array
from arkindex.project.mixins import SelectionMixin
......@@ -441,16 +441,10 @@ class ElementSlimSerializer(ElementTinySerializer):
Only set the thumbnail PUT URL for Ponos tasks that
are running thumbnail generation on a folder.
"""
# TODO: This check would be simplified to process.thumbnails once that attribute
# is available, allowing to use the get_process_from_ponos_auth helper directly.
task = self.context.get('request') and self.context['request'].auth
if (
isinstance(task, Task)
and element.type.folder
and task.image == settings.ARKINDEX_TASKS_IMAGE
and "generate_thumbnails" in task.command
):
return element.thumbnail.s3_put_url
if element.type.folder:
process = get_process_from_task_auth(self.context['request'])
if process and process.generate_thumbnails:
return element.thumbnail.s3_put_url
class Meta(ElementTinySerializer.Meta):
model = Element
......@@ -673,14 +667,23 @@ class ElementSerializer(ElementSlimSerializer):
return instance
class ElementNeighborsSerializer(serializers.Serializer):
# position attribute is left for compatibility but represents the real path ordering
position = serializers.IntegerField(source='ordering')
element = ElementLightSerializer()
parents = serializers.ListField(
class ElementNeighborsSerializer(serializers.ModelSerializer):
previous = ElementLightSerializer(allow_null=True)
next = ElementLightSerializer(allow_null=True)
path = serializers.ListField(
child=ElementLightSerializer()
)
class Meta:
model = ElementPath
fields = (
'path',
'ordering',
'previous',
'next',
)
read_only_fields = fields
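Together with `pagination_class = None` and the wider .only() in the view, the serializer now returns one flat item per neighboring path. A hedged sketch of a single item; the field values are invented, and ElementLightSerializer is assumed to expose at least id, type and name:

# Hedged sketch of one serialized ElementNeighbors item.
{
    "path": [
        {"id": "<uuid>", "type": "folder", "name": "Volume 1"},  # parent chain
    ],
    "ordering": 3,
    "previous": {"id": "<uuid>", "type": "page", "name": "Page 3"},  # None at the start
    "next": {"id": "<uuid>", "type": "page", "name": "Page 5"},      # None at the end
}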
@extend_schema_serializer(deprecate_fields=('worker_version', ))
class ElementCreateSerializer(ElementLightSerializer):
......
......@@ -2,7 +2,7 @@ import uuid
from datetime import datetime, timedelta, timezone
from io import StringIO
from textwrap import dedent
from unittest.mock import MagicMock, call, patch
from unittest.mock import MagicMock, call, patch, seal
from botocore.exceptions import ClientError
from django.core.management import call_command
......@@ -13,7 +13,7 @@ from arkindex.images.models import Image, ImageServer
from arkindex.ponos.models import Artifact, Farm, Task
from arkindex.process.models import DataFile, GitRefType, Process, ProcessMode, Repository, WorkerVersionState
from arkindex.project.tests import FixtureTestCase
from arkindex.training.models import Model, ModelVersion
from arkindex.training.models import Dataset, Model, ModelVersion
@override_settings(AWS_EXPORT_BUCKET='export', PONOS_S3_ARTIFACTS_BUCKET='ponos-artifacts', PONOS_S3_LOGS_BUCKET='ponos-logs', AWS_TRAINING_BUCKET='training')
......@@ -114,6 +114,7 @@ class TestCleanupCommand(FixtureTestCase):
def test_s3_not_found(self, s3_mock, rq_mock):
s3_mock.Object.return_value.delete.side_effect = ClientError({'Error': {'Code': '404'}}, 'delete_object')
seal(s3_mock)
with patch('django.utils.timezone.now') as mock_now:
mock_now.return_value = datetime.now(timezone.utc) - timedelta(days=42)
......@@ -164,6 +165,7 @@ class TestCleanupCommand(FixtureTestCase):
error = ClientError({'Error': {'Code': '500'}}, 'delete_object')
# Fail twice, then delete successfully
s3_mock.Object.return_value.delete.side_effect = [error, error, None]
seal(s3_mock)
with patch('django.utils.timezone.now') as mock_now:
mock_now.return_value = datetime.now(timezone.utc) - timedelta(days=42)
......@@ -277,6 +279,7 @@ class TestCleanupCommand(FixtureTestCase):
ClientError({'Error': {'Code': '500'}}, 'delete_object'),
ValueError('Something went wrong'),
]
seal(s3_mock)
trashed_df = DataFile.objects.filter(trashed=True).get()
self.assertEqual(DataFile.objects.count(), 2)
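The seal() calls sprinkled through these tests are standard library: after unittest.mock.seal(m), accessing any attribute that was not configured beforehand raises AttributeError instead of silently minting a new child mock, so the S3 mocks can only be used exactly as set up. A minimal demonstration:

# Demonstrates unittest.mock.seal, as used on the S3 mocks above.
from unittest.mock import MagicMock, seal

s3_mock = MagicMock()
s3_mock.Object.return_value.delete.return_value = None
seal(s3_mock)

s3_mock.Object().delete()  # fine: this chain was configured before sealing
# s3_mock.Objct            # would raise AttributeError: typos no longer pass silently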
......@@ -401,8 +404,7 @@ class TestCleanupCommand(FixtureTestCase):
self.assertEqual(unsupported_s3_artifact.delete.call_count, 0)
self.assertEqual(broken_s3_artifact.delete.call_count, 1)
@patch('arkindex.ponos.models.s3')
def test_cleanup_expired_processes(self, ponos_s3_mock, s3_mock, rq_mock):
def test_cleanup_expired_processes(self, s3_mock, rq_mock):
farm = Farm.objects.first()
expired_process = farm.processes.create(
......@@ -418,6 +420,22 @@ class TestCleanupCommand(FixtureTestCase):
)
expired_artifact = expired_task.artifacts.create(path='nope.txt', size=256)
expired_process_2 = farm.processes.create(
mode=ProcessMode.Dataset,
corpus=self.corpus,
creator=self.superuser,
)
expired_task_with_dataset_link = expired_process_2.tasks.create(
run=0,
depth=0,
slug='task',
expiry=datetime(1970, 1, 1, tzinfo=timezone.utc),
)
expired_task_with_dataset_link.dataset.set([
Dataset.objects.create(name="SEELE", description="Neon Genesis Evangelion", corpus=self.corpus, creator=self.user)
])
expired_artifact_with_dataset_link = expired_task_with_dataset_link.artifacts.create(path='nope.txt', size=256)
non_expired_process = farm.processes.create(
mode=ProcessMode.Workers,
corpus=self.corpus,
......@@ -439,7 +457,9 @@ class TestCleanupCommand(FixtureTestCase):
creator=self.superuser,
)
ponos_s3_mock.Object().key = 's3_key'
s3_mock.Object().key = 's3_key'
s3_mock.Object().delete.return_value = None
seal(s3_mock)
self.assertEqual(
self.cleanup(),
......@@ -483,12 +503,15 @@ class TestCleanupCommand(FixtureTestCase):
# Those still exist, refreshing works
expired_process.refresh_from_db()
expired_process_2.refresh_from_db()
non_expired_process.refresh_from_db()
expired_task_with_dataset_link.refresh_from_db()
non_expired_task.refresh_from_db()
expired_artifact_with_dataset_link.refresh_from_db()
non_expired_artifact.refresh_from_db()
empty_process.refresh_from_db()
self.assertEqual(ponos_s3_mock.Object().delete.call_count, 2)
self.assertEqual(s3_mock.Object().delete.call_count, 2)
def _make_revision_artifact(self):
"""
......@@ -523,8 +546,7 @@ class TestCleanupCommand(FixtureTestCase):
return revision, artifact
@patch('arkindex.ponos.models.s3')
def test_cleanup_expired_processes_docker_images(self, ponos_s3_mock, s3_mock, rq_mock):
def test_cleanup_expired_processes_docker_images(self, s3_mock, rq_mock):
"""
Artifacts used as Docker images for worker versions from expired processes
should only be deleted if the versions are neither on Git tags or on main branches.
......@@ -541,7 +563,9 @@ class TestCleanupCommand(FixtureTestCase):
main_revision.refs.create(repository=main_revision.repo, type=GitRefType.Branch, name='main')
tagged_revision.refs.create(repository=tagged_revision.repo, type=GitRefType.Tag, name='1.2.3-rc4')
ponos_s3_mock.Object().key = 's3_key'
s3_mock.Object().key = 's3_key'
s3_mock.Object().delete.return_value = None
seal(s3_mock)
self.assertEqual(
self.cleanup(),
......@@ -601,10 +625,9 @@ class TestCleanupCommand(FixtureTestCase):
main_revision.refresh_from_db()
tagged_revision.refresh_from_db()
self.assertEqual(ponos_s3_mock.Object().delete.call_count, 4)
self.assertEqual(s3_mock.Object().delete.call_count, 4)
@patch('arkindex.ponos.models.s3')
def test_cleanup_expired_processes_null(self, ponos_s3_mock, s3_mock, rq_mock):
def test_cleanup_expired_processes_null(self, s3_mock, rq_mock):
repo = Repository.objects.get(url='http://my_repo.fake/workers/worker')
# This revision on the `main` branch does not have any WorkerVersions.
......@@ -636,7 +659,9 @@ class TestCleanupCommand(FixtureTestCase):
branch_revision, branch_artifact = self._make_revision_artifact()
branch_revision.refs.create(repository=branch_revision.repo, type=GitRefType.Branch, name='my-awesome-branch')
ponos_s3_mock.Object().key = 's3_key'
s3_mock.Object().key = 's3_key'
s3_mock.Object().delete.return_value = None
seal(s3_mock)
self.assertEqual(
self.cleanup(),
......@@ -692,7 +717,7 @@ class TestCleanupCommand(FixtureTestCase):
empty_revision.refresh_from_db()
unavailable_worker_revision.refresh_from_db()
self.assertEqual(ponos_s3_mock.Object().delete.call_count, 4)
self.assertEqual(s3_mock.Object().delete.call_count, 4)
@patch('arkindex.documents.management.commands.cleanup.s3')
def test_cleanup_local_images(self, cleanup_s3_mock, s3_mock, rq_mock):
......
......@@ -295,7 +295,7 @@ class TestBulkClassification(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(5):
response = self.client.post(
......@@ -393,7 +393,7 @@ class TestBulkClassification(FixtureAPITestCase):
Classifications can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(8):
......@@ -440,7 +440,7 @@ class TestBulkClassification(FixtureAPITestCase):
local_process = self.user.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
self.assertNotEqual(self.worker_run.process_id, local_worker_run.process_id)
......
......@@ -795,7 +795,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(6):
......@@ -828,7 +828,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
Transcribed elements can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(14):
......@@ -871,7 +871,7 @@ class TestBulkElementTranscriptions(FixtureAPITestCase):
local_process = self.user.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(14):
......
......@@ -443,7 +443,7 @@ class TestBulkElements(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
payload = {**self.payload, 'worker_run_id': str(other_worker_run.id)}
......@@ -509,7 +509,7 @@ class TestBulkElements(FixtureAPITestCase):
Elements can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
payload = {**self.payload, 'worker_run_id': str(self.worker_run.id)}
......@@ -558,7 +558,7 @@ class TestBulkElements(FixtureAPITestCase):
local_process = self.user.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
payload = {**self.payload, 'worker_run_id': str(local_worker_run.id)}
......
......@@ -238,7 +238,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(6):
......@@ -357,7 +357,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
Transcription's entities can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(12):
......@@ -395,7 +395,7 @@ class TestBulkTranscriptionEntities(FixtureAPITestCase):
local_process = self.user.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(12):
......
......@@ -286,7 +286,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(2):
......@@ -407,7 +407,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
Transcriptions can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
element = self.corpus.elements.get(name='Volume 2, page 1r')
......@@ -462,7 +462,7 @@ class TestBulkTranscriptions(FixtureAPITestCase):
local_worker_run = local_process.worker_runs.get()
element = self.corpus.elements.get(name='Volume 2, page 1r')
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(6):
......
......@@ -260,7 +260,7 @@ class TestClassifications(FixtureAPITestCase):
# Take the user's local process, so that the user's rights take precedence over the Ponos task auth
local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(8):
......@@ -301,7 +301,7 @@ class TestClassifications(FixtureAPITestCase):
A classification can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(8):
......@@ -388,7 +388,7 @@ class TestClassifications(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(6):
......
......@@ -578,7 +578,7 @@ class TestCreateElements(FixtureAPITestCase):
})
def test_worker_run_task(self):
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(10):
......@@ -613,7 +613,7 @@ class TestCreateElements(FixtureAPITestCase):
# Take the user's local process, so that the user's rights take precedence over the Ponos task auth
local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(10):
......@@ -695,7 +695,7 @@ class TestCreateElements(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(6):
......
......@@ -223,7 +223,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
# Take the user's local process, so that the user's rights take precedence over the Ponos task auth
local_process = self.worker_run.process.creator.processes.get(mode=ProcessMode.Local)
local_worker_run = local_process.worker_runs.get()
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(8):
......@@ -259,7 +259,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
A transcription can be created with a WorkerRun of a non-local process
when authenticated as a Ponos task of this process
"""
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(8):
......@@ -345,7 +345,7 @@ class TestTranscriptionCreate(FixtureAPITestCase):
corpus=self.corpus,
)
other_worker_run = process2.worker_runs.create(version=self.worker_run.version, parents=[])
self.worker_run.process.start()
self.worker_run.process.run()
task = self.worker_run.process.tasks.first()
with self.assertNumQueries(7):
......