Skip to content
Snippets Groups Projects
Commit 902e8b3f authored by ml bonhomme's avatar ml bonhomme :bee: Committed by Erwan Rouchet
Browse files

Remove ProcessMode.IIIF

parent 399f0799
No related branches found
No related tags found
1 merge request!2340Remove ProcessMode.IIIF
from django.db import migrations


class Migration(migrations.Migration):
    """Data migration accompanying the removal of ProcessMode.IIIF.

    Any process row still stored with mode 'iiif' is rewritten to 'files',
    matching the serializer changes that fold IIIF imports into the Files
    import mode.
    """

    dependencies = [
        ("process", "0033_remove_process_generate_thumbnails"),
    ]

    operations = [
        migrations.RunSQL(
            [
                """
UPDATE process_process
SET mode = 'files'
WHERE mode = 'iiif'
"""
            ],
            # One-way data change: reversing the migration intentionally
            # leaves the rows as 'files' (the 'iiif' mode no longer exists).
            reverse_sql=migrations.RunSQL.noop,
            # Safe to drop when squashing: fresh databases cannot contain
            # 'iiif' rows once the enum member is gone.
            elidable=True,
        )
    ]
...@@ -61,7 +61,6 @@ class ActivityState(Enum): ...@@ -61,7 +61,6 @@ class ActivityState(Enum):
class ProcessMode(Enum): class ProcessMode(Enum):
Files = "files" Files = "files"
IIIF = "iiif"
Workers = "workers" Workers = "workers"
Template = "template" Template = "template"
S3 = "s3" S3 = "s3"
......
from textwrap import dedent
from drf_spectacular.utils import extend_schema_field from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers from rest_framework import serializers
...@@ -14,7 +16,13 @@ class DataFileSerializer(serializers.ModelSerializer): ...@@ -14,7 +16,13 @@ class DataFileSerializer(serializers.ModelSerializer):
""" """
status = EnumField(S3FileStatus) status = EnumField(S3FileStatus)
s3_url = serializers.SerializerMethodField() s3_url = serializers.SerializerMethodField(
help_text=dedent("""
URL that can be used to download this DataFile from S3.
Only available to Ponos tasks of `Files` processes or instance administrators.
""")
)
class Meta: class Meta:
model = DataFile model = DataFile
...@@ -41,11 +49,11 @@ class DataFileSerializer(serializers.ModelSerializer): ...@@ -41,11 +49,11 @@ class DataFileSerializer(serializers.ModelSerializer):
def get_s3_url(self, obj): def get_s3_url(self, obj):
if "request" not in self.context: if "request" not in self.context:
return return
# Only allow the S3 URL for ponos tasks of Files or IIIF processes or admins # Only allow the S3 URL for ponos tasks of Files processes or admins
request = self.context["request"] request = self.context["request"]
if is_admin_or_ponos_task(request): if is_admin_or_ponos_task(request):
request_process = get_process_from_task_auth(request) request_process = get_process_from_task_auth(request)
if not request_process or request_process.mode in (ProcessMode.Files, ProcessMode.IIIF): if not request_process or request_process.mode == ProcessMode.Files:
return obj.s3_url return obj.s3_url
......
...@@ -184,10 +184,10 @@ class ProcessDetailsSerializer(ProcessSerializer): ...@@ -184,10 +184,10 @@ class ProcessDetailsSerializer(ProcessSerializer):
# Fields that can always be edited on any process of any state # Fields that can always be edited on any process of any state
editable_fields = {"name", "state"} editable_fields = {"name", "state"}
# Allow editing the element ID and name on Files and IIIF processes at any time # Allow editing the element ID and name on Files processes at any time
# TODO: Only allow editing the element ID on a running file import to Ponos tasks, # TODO: Only allow editing the element ID on a running file import to Ponos tasks,
# since this edition is only permitted to show the "View element" button once the import completes. # since this edition is only permitted to show the "View element" button once the import completes.
if self.instance.mode in (ProcessMode.Files, ProcessMode.IIIF): if self.instance.mode == ProcessMode.Files:
editable_fields.add("element") editable_fields.add("element")
# If some fields are being edited that are not the fields that are always editable # If some fields are being edited that are not the fields that are always editable
...@@ -239,7 +239,6 @@ class ProcessListSerializer(ProcessLightSerializer): ...@@ -239,7 +239,6 @@ class ProcessListSerializer(ProcessLightSerializer):
class FilesProcessSerializer(serializers.ModelSerializer): class FilesProcessSerializer(serializers.ModelSerializer):
mode = EnumField(ProcessMode, default=ProcessMode.Files)
files = serializers.PrimaryKeyRelatedField( files = serializers.PrimaryKeyRelatedField(
queryset=DataFile.objects.select_related("corpus"), queryset=DataFile.objects.select_related("corpus"),
many=True, many=True,
...@@ -249,7 +248,7 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -249,7 +248,7 @@ class FilesProcessSerializer(serializers.ModelSerializer):
allow_null=True, allow_null=True,
source="element", source="element",
) )
folder_type = serializers.SlugField(required=False, allow_null=True) folder_type = serializers.SlugField()
element_type = serializers.SlugField() element_type = serializers.SlugField()
farm_id = serializers.PrimaryKeyRelatedField( farm_id = serializers.PrimaryKeyRelatedField(
queryset=Farm.objects.all(), queryset=Farm.objects.all(),
...@@ -262,15 +261,11 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -262,15 +261,11 @@ class FilesProcessSerializer(serializers.ModelSerializer):
creator = serializers.HiddenField(default=serializers.CurrentUserDefault()) creator = serializers.HiddenField(default=serializers.CurrentUserDefault())
default_error_messages = { default_error_messages = {
"mode_not_allowed": "This mode is not allowed when importing from files",
"files_required": "At least one file is required to start an import process", "files_required": "At least one file is required to start an import process",
"unique_corpus": "Imports can only run on files from a single corpus", "unique_corpus": "Imports can only run on files from a single corpus",
"corpus_read_only": "Cannot write in corpus", "corpus_read_only": "Cannot write in corpus",
"folder_not_found": "Folder does not exist", "folder_not_found": "Folder does not exist",
"unsupported_content_type": "File imports can only import images, PDF documents or ZIP archives", "unsupported_content_type": "File imports can only import images, PDF documents, ZIP archives or IIIF manifests",
"iiif_only": "IIIF imports can only import IIIF documents",
"folder_required": "Either folder_type, folder_id or both are required",
"iiif_folder_required": "IIIF imports require both folder_type and element_type",
"type_not_found": "Element type {slug!r} does not exist", "type_not_found": "Element type {slug!r} does not exist",
"type_not_folder": "Element type {slug!r} is not a folder", "type_not_folder": "Element type {slug!r} is not a folder",
"type_folder": "Element type {slug!r} is a folder", "type_folder": "Element type {slug!r} is a folder",
...@@ -280,7 +275,6 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -280,7 +275,6 @@ class FilesProcessSerializer(serializers.ModelSerializer):
class Meta: class Meta:
model = Process model = Process
fields = ( fields = (
"mode",
"files", "files",
"folder_id", "folder_id",
"folder_type", "folder_type",
...@@ -289,11 +283,6 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -289,11 +283,6 @@ class FilesProcessSerializer(serializers.ModelSerializer):
"creator", "creator",
) )
def validate_mode(self, mode):
if mode not in (ProcessMode.Files, ProcessMode.IIIF):
self.fail("mode_not_allowed")
return mode
def validate_files(self, files): def validate_files(self, files):
if not files: if not files:
self.fail("files_required") self.fail("files_required")
...@@ -326,21 +315,14 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -326,21 +315,14 @@ class FilesProcessSerializer(serializers.ModelSerializer):
return farm return farm
def validate(self, data): def validate(self, data):
if data["mode"] == ProcessMode.Files: if not all(
if not all( f.content_type == "application/pdf"
f.content_type == "application/pdf" or f.content_type in settings.ARCHIVE_MIME_TYPES
or f.content_type in settings.ARCHIVE_MIME_TYPES or f.content_type.startswith("image/")
or f.content_type.startswith("image/") or f.content_type.split(";")[0] in ["application/json", "application/ld+json"]
for f in data["files"] for f in data["files"]
): ):
self.fail("unsupported_content_type") self.fail("unsupported_content_type")
elif data["mode"] == ProcessMode.IIIF:
if not set(f.content_type.split(";")[0] for f in data["files"]) <= {"application/json", "application/ld+json"}:
self.fail("iiif_only")
else:
raise NotImplementedError
data["corpus"] = corpus = data["files"][0].corpus data["corpus"] = corpus = data["files"][0].corpus
...@@ -354,29 +336,21 @@ class FilesProcessSerializer(serializers.ModelSerializer): ...@@ -354,29 +336,21 @@ class FilesProcessSerializer(serializers.ModelSerializer):
data["element_type"] = element_type data["element_type"] = element_type
folder_type_slug, folder = data.get("folder_type"), data.get("element") folder_type_slug, folder = data.get("folder_type"), data.get("element")
if folder_type_slug: folder_type = corpus.types.filter(slug=folder_type_slug).first()
folder_type = corpus.types.filter(slug=folder_type_slug).first() if not folder_type:
if not folder_type: self.fail("type_not_found", slug=folder_type_slug)
self.fail("type_not_found", slug=folder_type_slug) if not folder_type.folder:
if not folder_type.folder: self.fail("type_not_folder", slug=folder_type_slug)
self.fail("type_not_folder", slug=folder_type_slug) data["folder_type"] = folder_type
data["folder_type"] = folder_type # When a folder is set, it must be in the same corpus as all files
else: if folder and folder.corpus_id != corpus.id:
if data["mode"] == ProcessMode.IIIF: self.fail("wrong_folder_corpus")
# folder_type is required in IIIF
self.fail("iiif_folder_required")
if not folder:
# Either folder_type or folder_id are required for other imports
self.fail("folder_required")
# When a folder is set, it must be in the same corpus as all files
if folder.corpus_id != corpus.id:
self.fail("wrong_folder_corpus")
return data return data
def create(self, validated_data): def create(self, validated_data):
files = validated_data.pop("files") files = validated_data.pop("files")
process = super().create(validated_data) process = super().create({"mode": ProcessMode.Files.value, **validated_data})
process.files.set(files) process.files.set(files)
process.run() process.run()
return process return process
......
...@@ -171,7 +171,7 @@ class TestDataFileApi(FixtureAPITestCase): ...@@ -171,7 +171,7 @@ class TestDataFileApi(FixtureAPITestCase):
def test_retrieve_datafile_s3_url_task_process_mode(self, gen_url_mock): def test_retrieve_datafile_s3_url_task_process_mode(self, gen_url_mock):
""" """
Ponos task authentication allows access to the S3 URL, only if the task's Ponos task authentication allows access to the S3 URL, only if the task's
parent process has a Files or IIIF mode. parent process has a Files mode.
""" """
user = User.objects.create(email="user2@test.test", display_name="User 2", verified_email=True) user = User.objects.create(email="user2@test.test", display_name="User 2", verified_email=True)
gen_url_mock.return_value = "http://somewhere" gen_url_mock.return_value = "http://somewhere"
...@@ -185,7 +185,7 @@ class TestDataFileApi(FixtureAPITestCase): ...@@ -185,7 +185,7 @@ class TestDataFileApi(FixtureAPITestCase):
with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)): with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
process.run() process.run()
cases = [ cases = [
(process_mode, "http://somewhere" if process_mode in (ProcessMode.Files, ProcessMode.IIIF) else None) (process_mode, "http://somewhere" if process_mode == ProcessMode.Files else None)
for process_mode in ProcessMode for process_mode in ProcessMode
] ]
for process_mode, s3_url in cases: for process_mode, s3_url in cases:
......
...@@ -58,6 +58,11 @@ class TestProcesses(FixtureAPITestCase): ...@@ -58,6 +58,11 @@ class TestProcesses(FixtureAPITestCase):
size=42, size=42,
content_type="application/json", content_type="application/json",
) )
cls.bad_df = cls.corpus.files.create(
name="test.mp4",
size=42,
content_type="video/mp4",
)
cls.page_type = ElementType.objects.get(corpus=cls.corpus, slug="page") cls.page_type = ElementType.objects.get(corpus=cls.corpus, slug="page")
cls.volume_type = ElementType.objects.get(corpus=cls.corpus, slug="volume") cls.volume_type = ElementType.objects.get(corpus=cls.corpus, slug="volume")
cls.ml_class = cls.corpus.ml_classes.create(name="clafoutis") cls.ml_class = cls.corpus.ml_classes.create(name="clafoutis")
...@@ -1790,32 +1795,6 @@ class TestProcesses(FixtureAPITestCase): ...@@ -1790,32 +1795,6 @@ class TestProcesses(FixtureAPITestCase):
["import_s3"], ["import_s3"],
) )
def test_retry_iiif(self):
self.client.force_login(self.user)
process = self.corpus.processes.create(
mode=ProcessMode.IIIF,
creator=self.user,
)
process.worker_runs.create(version=self.version_with_model)
process.tasks.create(state=State.Error, run=0, depth=0)
self.assertEqual(process.state, State.Error)
process.finished = timezone.now()
with (
self.settings(IMPORTS_WORKER_VERSION=str(self.version_with_model.id)),
self.assertNumQueries(14),
):
response = self.client.post(reverse("api:process-retry", kwargs={"pk": process.id}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
process.refresh_from_db()
self.assertEqual(process.state, State.Unscheduled)
self.assertIsNone(process.finished)
self.assertQuerysetEqual(
process.tasks.filter(run=1).values_list("slug", flat=True),
["import_iiif"],
)
def test_from_files_requires_login(self): def test_from_files_requires_login(self):
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
"files": [str(self.img_df.id)], "files": [str(self.img_df.id)],
...@@ -1859,7 +1838,6 @@ class TestProcesses(FixtureAPITestCase): ...@@ -1859,7 +1838,6 @@ class TestProcesses(FixtureAPITestCase):
with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)): with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
"files": [str(self.pdf_df.id)], "files": [str(self.pdf_df.id)],
"mode": "files",
"folder_type": "volume", "folder_type": "volume",
"element_type": "page", "element_type": "page",
}, format="json") }, format="json")
...@@ -1870,6 +1848,24 @@ class TestProcesses(FixtureAPITestCase): ...@@ -1870,6 +1848,24 @@ class TestProcesses(FixtureAPITestCase):
self.assertListEqual(list(process.files.all()), [self.pdf_df]) self.assertListEqual(list(process.files.all()), [self.pdf_df])
self.assertIsNone(process.element) self.assertIsNone(process.element)
@override_settings(IMPORTS_WORKER_VERSION=None)
def test_from_files_iiif(self):
self.client.force_login(self.user)
with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)],
"folder_type": "volume",
"element_type": "page",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = response.json()
process = Process.objects.get(id=data["id"])
self.assertEqual(process.mode, ProcessMode.Files)
self.assertListEqual(list(process.files.all()), [self.iiif_df])
self.assertIsNone(process.element)
@override_settings(IMPORTS_WORKER_VERSION=None) @override_settings(IMPORTS_WORKER_VERSION=None)
def test_from_files_multiple_types(self): def test_from_files_multiple_types(self):
self.client.force_login(self.user) self.client.force_login(self.user)
...@@ -1884,11 +1880,12 @@ class TestProcesses(FixtureAPITestCase): ...@@ -1884,11 +1880,12 @@ class TestProcesses(FixtureAPITestCase):
for content_type in settings.ARCHIVE_MIME_TYPES for content_type in settings.ARCHIVE_MIME_TYPES
) )
with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)), self.assertNumQueries(37): with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)), self.assertNumQueries(38):
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
"files": [ "files": [
str(self.pdf_df.id), str(self.pdf_df.id),
str(self.img_df.id), str(self.img_df.id),
str(self.iiif_df.id),
*(datafile.id for datafile in archive_datafiles), *(datafile.id for datafile in archive_datafiles),
], ],
"mode": "files", "mode": "files",
...@@ -1904,137 +1901,41 @@ class TestProcesses(FixtureAPITestCase): ...@@ -1904,137 +1901,41 @@ class TestProcesses(FixtureAPITestCase):
process.files.all(), [ process.files.all(), [
self.img_df, self.img_df,
self.pdf_df, self.pdf_df,
self.iiif_df,
*archive_datafiles, *archive_datafiles,
], ],
ordered=False, ordered=False,
) )
self.assertIsNone(process.element) self.assertIsNone(process.element)
@override_settings( def test_from_files_files_wrong_type(self):
PONOS_DEFAULT_ENV={"ARKINDEX_API_TOKEN": "testToken"},
ARKINDEX_TASKS_IMAGE="registry.teklia.com/tasks",
)
def test_from_files_iiif(self):
self.client.force_login(self.user) self.client.force_login(self.user)
with self.assertNumQueries(25), self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)): with self.assertNumQueries(4):
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)], "files": [str(self.bad_df.id)],
"mode": "iiif",
"folder_type": "volume", "folder_type": "volume",
"element_type": "page", "element_type": "page",
}, format="json") }, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
data = response.json()
process = Process.objects.get(id=data["id"])
self.assertEqual(process.mode, ProcessMode.IIIF)
self.assertListEqual(list(process.files.all()), [self.iiif_df])
self.assertEqual(process.folder_type.slug, "volume")
self.assertEqual(process.element_type.slug, "page")
self.assertIsNone(process.element)
self.assertEqual(process.creator, self.user)
self.assertEqual(process.corpus, self.corpus)
worker_run = process.worker_runs.get()
self.assertEqual(worker_run.version, self.import_worker_version)
self.assertListEqual(worker_run.parents, [])
self.assertIsNone(worker_run.configuration_id)
self.assertIsNone(worker_run.model_version_id)
self.assertTrue(process.tasks.exists()) self.assertEqual(response.json(), {"non_field_errors": ["File imports can only import images, PDF documents, ZIP archives or IIIF manifests"]})
@override_settings(IMPORTS_WORKER_VERSION=None) @override_settings(IMPORTS_WORKER_VERSION=None)
def test_from_files_iiif_with_json_charset(self): def test_from_files_mode_ignored(self):
self.iiif_df.content_type = "application/json;charset=utf-8"
self.assertEqual(self.iiif_df.content_type, "application/json;charset=utf-8")
self.client.force_login(self.user) self.client.force_login(self.user)
with(self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id))):
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)],
"mode": "iiif",
"folder_type": "volume",
"element_type": "page",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = response.json()
process = Process.objects.get(id=data["id"])
self.assertEqual(process.mode, ProcessMode.IIIF)
self.assertListEqual(list(process.files.all()), [self.iiif_df])
self.assertEqual(process.folder_type.slug, "volume")
self.assertEqual(process.element_type.slug, "page")
self.assertIsNone(process.element)
@override_settings(IMPORTS_WORKER_VERSION=None) with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
def test_from_files_iiif_with_json_profile_and_charset(self):
self.iiif_df.content_type = 'application/ld+json;profile="http://iiif.io/api/presentation/2/context.json";charset=utf-8'
self.assertEqual(self.iiif_df.content_type, 'application/ld+json;profile="http://iiif.io/api/presentation/2/context.json";charset=utf-8')
self.client.force_login(self.user)
with(self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id))):
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)], "files": [str(self.iiif_df.id)],
"mode": "iiif",
"folder_type": "volume", "folder_type": "volume",
"element_type": "page", "element_type": "page",
"mode": "easy"
}, format="json") }, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.status_code, status.HTTP_201_CREATED)
data = response.json() data = response.json()
process = Process.objects.get(id=data["id"]) process = Process.objects.get(id=data["id"])
self.assertEqual(process.mode, ProcessMode.IIIF) self.assertEqual(process.mode, ProcessMode.Files)
self.assertListEqual(list(process.files.all()), [self.iiif_df])
self.assertEqual(process.folder_type.slug, "volume")
self.assertEqual(process.element_type.slug, "page")
self.assertIsNone(process.element)
def test_from_files_iiif_requires_folder_type(self):
self.client.force_login(self.user)
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)],
"mode": "iiif",
"element_type": "page",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictEqual(response.json(), {
"non_field_errors": ["IIIF imports require both folder_type and element_type"]
})
def test_from_files_invalid_mode(self):
self.client.force_login(self.user)
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.img_df.id)],
"folder_type": "volume",
"element_type": "page",
"mode": "repository",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_from_files_iiif_wrong_type(self):
self.client.force_login(self.user)
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.pdf_df.id)],
"folder_type": "volume",
"element_type": "page",
"mode": "iiif",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.json(), {"non_field_errors": ["IIIF imports can only import IIIF documents"]})
def test_from_files_files_wrong_type(self):
self.client.force_login(self.user)
with self.assertNumQueries(4):
response = self.client.post(reverse("api:files-process"), {
"files": [str(self.iiif_df.id)],
"folder_type": "volume",
"element_type": "page",
"mode": "files",
}, format="json")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(response.json(), {"non_field_errors": ["File imports can only import images, PDF documents or ZIP archives"]})
@override_settings(IMPORTS_WORKER_VERSION=None) @override_settings(IMPORTS_WORKER_VERSION=None)
def test_from_files_folder_id(self): def test_from_files_folder_id(self):
...@@ -2047,6 +1948,7 @@ class TestProcesses(FixtureAPITestCase): ...@@ -2047,6 +1948,7 @@ class TestProcesses(FixtureAPITestCase):
"files": [str(self.pdf_df.id)], "files": [str(self.pdf_df.id)],
"mode": "files", "mode": "files",
"folder_id": str(volume.id), "folder_id": str(volume.id),
"folder_type": "volume",
"element_type": "page", "element_type": "page",
}, },
format="json", format="json",
...@@ -2056,6 +1958,23 @@ class TestProcesses(FixtureAPITestCase): ...@@ -2056,6 +1958,23 @@ class TestProcesses(FixtureAPITestCase):
process = Process.objects.get(id=data["id"]) process = Process.objects.get(id=data["id"])
self.assertEqual(process.element, volume) self.assertEqual(process.element, volume)
def test_from_files_folder_type_required(self):
self.client.force_login(self.user)
volume = self.corpus.elements.get(name="Volume 1")
with self.settings(IMPORTS_WORKER_VERSION=str(self.import_worker_version.id)):
response = self.client.post(
reverse("api:files-process"),
{
"files": [str(self.pdf_df.id)],
"mode": "files",
"folder_id": str(volume.id),
"element_type": "page",
},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictEqual(response.json(), {"folder_type": ["This field is required."]})
def test_from_files_no_files(self): def test_from_files_no_files(self):
self.client.force_login(self.user) self.client.force_login(self.user)
response = self.client.post(reverse("api:files-process"), { response = self.client.post(reverse("api:files-process"), {
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment