diff --git a/arkindex/documents/export/dataset.sql b/arkindex/documents/export/dataset.sql
index 48d54910f083f8c3c81ea74b13b1387f23595bb9..557ba99f05df05ed0d6a620ac1ab12e06cfa1932 100644
--- a/arkindex/documents/export/dataset.sql
+++ b/arkindex/documents/export/dataset.sql
@@ -1,6 +1,7 @@
 SELECT
     dataset.id,
     dataset.name,
+    dataset.description,
     dataset.state,
     string_agg(datasetset.name, ',')
 FROM training_dataset dataset
diff --git a/arkindex/documents/export/structure.sql b/arkindex/documents/export/structure.sql
index b3eca511e3689527efa56d9c459b30eabe9c6c9f..da2bafd29225af088f5ece876c720af08f8bac1c 100644
--- a/arkindex/documents/export/structure.sql
+++ b/arkindex/documents/export/structure.sql
@@ -171,6 +171,7 @@ CREATE TABLE metadata (
 CREATE TABLE dataset (
     id VARCHAR(37) NOT NULL,
     name VARCHAR(100) NOT NULL,
+    description TEXT NOT NULL,
     state VARCHAR(50) NOT NULL DEFAULT 'open',
     sets TEXT NOT NULL,
     PRIMARY KEY (id)
diff --git a/arkindex/documents/management/commands/load_export.py b/arkindex/documents/management/commands/load_export.py
index fcf94b613938f592423531dba08748e324b963ec..d3240d55f18f8b3883f4b589c2eac400e7d9a319 100644
--- a/arkindex/documents/management/commands/load_export.py
+++ b/arkindex/documents/management/commands/load_export.py
@@ -297,7 +297,7 @@ class Command(BaseCommand):
             corpus=self.corpus,
             name=row["name"],
             creator=self.user,
-            description="Imported dataset",
+            description=row["description"] if "description" in row.keys() else "Imported dataset",
         )]

     def convert_dataset_sets(self, row):
diff --git a/arkindex/documents/tests/commands/test_load_export.py b/arkindex/documents/tests/commands/test_load_export.py
index f1fca7354850915d69a84adf1d68c36b4b619a8b..ff43bf2fe3eb22b7be8c5016e2503724d531f12f 100644
--- a/arkindex/documents/tests/commands/test_load_export.py
+++ b/arkindex/documents/tests/commands/test_load_export.py
@@ -13,7 +13,7 @@ from arkindex.documents.models import Corpus, Element, ElementPath, EntityType,
 from arkindex.documents.tasks import corpus_delete
 from arkindex.images.models import Image, ImageServer
 from arkindex.process.models import ProcessMode, Repository, Worker, WorkerRun, WorkerType, WorkerVersion
-from arkindex.project.tests import FixtureTestCase
+from arkindex.project.tests import FixtureTestCase, force_constraints_immediate
 from arkindex.training.models import Dataset, DatasetElement

 BASE_DIR = Path(__file__).absolute().parent
@@ -211,7 +211,8 @@ class TestLoadExport(FixtureTestCase):
         ImageServer.objects.all().delete()
         WorkerVersion.objects.filter(id=reco_version.id).delete()

-        call_command("load_export", db_path, "--email", self.user.email, "--corpus-name", "My corpus")
+        with force_constraints_immediate():
+            call_command("load_export", db_path, "--email", self.user.email, "--corpus-name", "My corpus")

         # Call dumpdata command after the import
         _, dump_path_after = tempfile.mkstemp(suffix=".json")
@@ -247,7 +248,8 @@ class TestLoadExport(FixtureTestCase):
         db.commit()
         db.close()

-        call_command("load_export", temp_file, "--email", self.user.email, "--corpus-name", "My corpus")
+        with force_constraints_immediate():
+            call_command("load_export", temp_file, "--email", self.user.email, "--corpus-name", "My corpus")

         corpus = Corpus.objects.get(name="My corpus")
         self.assertEqual(corpus.types.all().count(), 0)
@@ -307,7 +309,8 @@ class TestLoadExport(FixtureTestCase):
         # meaning the test can only fail.
         ElementPath.objects.filter(element__corpus=self.corpus).delete()

-        call_command("load_export", db_path, "--email", self.user.email, "--corpus-name", "My corpus")
+        with force_constraints_immediate():
+            call_command("load_export", db_path, "--email", self.user.email, "--corpus-name", "My corpus")

         corpus = Corpus.objects.get(name="My corpus")
         self.assertEqual(corpus.types.all().count(), 6)
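
Note on the description fallback in load_export.py: exports produced before this change have no description column, hence the guard. Assuming the command reads rows back as sqlite3.Row objects (an assumption suggested by the .keys() call, not confirmed by this diff), the "in" operator iterates a row's values rather than its column names, so key presence must be checked through keys(). A minimal standalone illustration:

    import sqlite3

    # Hypothetical illustration, not part of this diff: sqlite3.Row does not
    # implement key-based membership tests, so "in" falls back to iterating
    # the row's *values*.
    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    row = conn.execute("SELECT 'My dataset' AS name").fetchone()

    assert "name" in row.keys()    # True: "name" is a column name
    assert "name" not in row       # iteration yields values, not keys
    assert "My dataset" in row     # True: 'My dataset' is a value

    # The guard from the command therefore degrades safely on old exports:
    description = row["description"] if "description" in row.keys() else "Imported dataset"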
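
The test changes wrap each call_command("load_export", ...) in force_constraints_immediate, which this diff imports from arkindex.project.tests but does not define. A minimal sketch of what such a helper could look like, assuming it simply toggles PostgreSQL's deferred constraint checking (Django's TestCase rolls each test back instead of committing, so constraints declared DEFERRABLE INITIALLY DEFERRED would otherwise never be checked):

    from contextlib import contextmanager

    from django.db import connection

    @contextmanager
    def force_constraints_immediate():
        # Sketch only; the actual helper in arkindex.project.tests may differ.
        # PostgreSQL checks DEFERRED constraints at COMMIT, which never happens
        # inside a TestCase transaction; IMMEDIATE checks them per statement.
        with connection.cursor() as cursor:
            cursor.execute("SET CONSTRAINTS ALL IMMEDIATE")
        try:
            yield
        finally:
            with connection.cursor() as cursor:
                cursor.execute("SET CONSTRAINTS ALL DEFERRED")

With constraints forced to IMMEDIATE, an import that violates a deferred foreign key fails inside the with block, where the test can observe it, instead of slipping through when the transaction is rolled back.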