diff --git a/arkindex/training/migrations/0008_dataset_unique_elements.py b/arkindex/training/migrations/0008_dataset_unique_elements.py
index 44224cfde6ebd29ee514783991791619e44eaea4..229afdda5432d1dbc4c5d0519407d8c27d397d64 100644
--- a/arkindex/training/migrations/0008_dataset_unique_elements.py
+++ b/arkindex/training/migrations/0008_dataset_unique_elements.py
@@ -11,9 +11,9 @@ def update_unique_elements(apps, schema_editor):
         models.Exists(
             DatasetElement.objects
             .filter(set__dataset_id=models.OuterRef("pk"))
-            .values("element")
-            .annotate(dups=models.Count("element"))
-            .filter(dups__gte=2)
+            .values("element_id")
+            .annotate(dupes=models.Count("element_id"))
+            .filter(dupes__gte=2)
         )
     ).update(unique_elements=False)

diff --git a/arkindex/training/serializers.py b/arkindex/training/serializers.py
index 9686bbb12496048efddacb47275206d6e0826b39..5faa79fc6152fcb6ea3ab0a80252a8e8c7fe18e7 100644
--- a/arkindex/training/serializers.py
+++ b/arkindex/training/serializers.py
@@ -583,9 +583,9 @@ class DatasetSerializer(serializers.ModelSerializer):
         if unique is True and self.instance and (
             DatasetElement.objects
             .filter(set__dataset_id=self.instance.pk)
-            .values("element")
-            .annotate(dups=Count("element"))
-            .filter(dups__gte=2)
+            .values("element_id")
+            .annotate(dupes=Count("element_id"))
+            .filter(dupes__gte=2)
             .exists()
         ):
             raise ValidationError("Elements are currently contained by multiple sets.")