Ignore sets when deleting a process dataset

Merged ml bonhomme requested to merge del-process-dataset-ignore-sets into master
2 files  +20  -2
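Only the test file appears in the excerpt below; the API-side change the title describes lives in the second file of this MR. As a rough illustration of what "ignore sets" means for the delete endpoint (not the actual Arkindex view: the class name, URL kwargs and import path are assumptions), the destroy lookup would use only the process and dataset, never the sets:

# Hypothetical sketch only; names and import paths are assumptions, not the real Arkindex code.
from django.shortcuts import get_object_or_404
from rest_framework import generics

from arkindex.process.models import ProcessDataset  # assumed module path


class DestroyProcessDataset(generics.DestroyAPIView):
    """Delete the link between a process and a dataset."""

    def get_object(self):
        # The sets carried by the ProcessDataset are ignored: (process, dataset)
        # is enough to identify the row, since two process datasets cannot share
        # the same process and dataset.
        return get_object_or_404(
            ProcessDataset,
            process_id=self.kwargs["process"],
            dataset_id=self.kwargs["dataset"],
        )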
@@ -50,7 +50,6 @@ class TestProcessDatasets(FixtureAPITestCase):
            corpus_id=cls.corpus.id
        )
        ProcessDataset.objects.create(process=cls.dataset_process_2, dataset=cls.dataset2, sets=cls.dataset2.sets)
        cls.dataset_process_2.datasets.set([cls.dataset2])
        # For repository process
        cls.repo = Repository.objects.get(url="http://my_repo.fake/workers/worker")
@@ -766,3 +765,18 @@ class TestProcessDatasets(FixtureAPITestCase):
            )
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(ProcessDataset.objects.filter(process=self.dataset_process, dataset=self.dataset1).exists())

    def test_destroy_sets_agnostic(self):
        """
        When deleting a process dataset, its sets do not matter: there cannot be
        two process datasets with the same process and dataset, whatever their sets are.
        """
        self.process_dataset_1.sets = ["test"]
        self.process_dataset_1.save()
        self.client.force_login(self.test_user)
        with self.assertNumQueries(9):
            response = self.client.delete(
                reverse("api:process-dataset", kwargs={"process": self.dataset_process.id, "dataset": self.dataset1.id}),
            )
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertFalse(ProcessDataset.objects.filter(process=self.dataset_process, dataset=self.dataset1).exists())
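The new test's docstring relies on the rule that a process and a dataset can only be linked once. A minimal sketch of how such a rule could be expressed as a Django unique constraint follows; the real ProcessDataset model is not part of this diff, so the field types and related model names below are guesses, not the actual schema:

# Illustrative sketch; the actual ProcessDataset model and its fields are not shown in this MR.
from django.db import models


class ProcessDataset(models.Model):
    process = models.ForeignKey("process.Process", on_delete=models.CASCADE)
    dataset = models.ForeignKey("training.Dataset", on_delete=models.CASCADE)
    # The test assigns a plain list of set names, e.g. ["test"]
    sets = models.JSONField(default=list)

    class Meta:
        constraints = [
            # One row per (process, dataset) pair, whatever the sets are
            models.UniqueConstraint(fields=["process", "dataset"], name="unique_process_dataset"),
        ]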