Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • arkindex/backend
1 result
Show changes
Commits on Source (7)
1.1.2-rc3
1.1.2
......@@ -1045,7 +1045,6 @@
"name": "number",
"type": "text",
"value": "1",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1058,7 +1057,6 @@
"name": "number",
"type": "text",
"value": "4",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1071,7 +1069,6 @@
"name": "folio",
"type": "text",
"value": "2r",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1084,7 +1081,6 @@
"name": "folio",
"type": "text",
"value": "1v",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1097,7 +1093,6 @@
"name": "folio",
"type": "text",
"value": "1r",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1110,7 +1105,6 @@
"name": "number",
"type": "text",
"value": "2",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1123,7 +1117,6 @@
"name": "number",
"type": "text",
"value": "5",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1136,7 +1129,6 @@
"name": "folio",
"type": "text",
"value": "1r",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1149,7 +1141,6 @@
"name": "folio",
"type": "text",
"value": "1v",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1162,7 +1153,6 @@
"name": "folio",
"type": "text",
"value": "2r",
"index": 0,
"entity": null,
"worker_version": null
}
......@@ -1175,7 +1165,6 @@
"name": "number",
"type": "text",
"value": "3",
"index": 0,
"entity": null,
"worker_version": null
}
......
# Generated by Django 3.2.3 on 2021-10-22 14:18
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``index`` field from the MetaData model.

    Per the accompanying diff, duplicate detection moves from the
    (element, name, index) unique constraint to an application-level
    check in the serializer, and default ordering falls back to the
    primary key instead of the removed ``index`` column.
    """

    # Must run after the migration that added Element.creator.
    dependencies = [
        ('documents', '0043_element_creator'),
    ]

    operations = [
        # Ordering previously ended with 'index'; 'id' now provides a
        # stable tie-breaker for metadata sharing the same element/name.
        migrations.AlterModelOptions(
            name='metadata',
            options={'ordering': ('element', 'name', 'id')},
        ),
        # Clear the ('element', 'name', 'index') unique constraint first:
        # the column cannot be dropped while a constraint still covers it.
        migrations.AlterUniqueTogether(
            name='metadata',
            unique_together=set(),
        ),
        # Finally drop the now-unreferenced column.
        migrations.RemoveField(
            model_name='metadata',
            name='index',
        ),
    ]
......@@ -668,7 +668,6 @@ class MetaData(InterpretedDateMixin, models.Model):
name = models.CharField(max_length=250)
type = EnumField(MetaType, max_length=50, db_index=True)
value = models.TextField()
index = models.PositiveIntegerField(default=0)
entity = models.ForeignKey(Entity, null=True, blank=True, related_name='metadatas', on_delete=models.SET_NULL)
worker_version = models.ForeignKey(
'dataimport.WorkerVersion',
......@@ -679,10 +678,7 @@ class MetaData(InterpretedDateMixin, models.Model):
)
class Meta:
ordering = ('element', 'name', 'index')
unique_together = (
('element', 'name', 'index'),
)
ordering = ('element', 'name', 'id')
constraints = [
models.CheckConstraint(
# Either the metadata is not numeric, or it has a value that casts to a double properly.
......
import bleach
from django.db.models import Max
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
......@@ -151,32 +150,27 @@ class MetaDataLightSerializer(serializers.ModelSerializer):
if not (user.is_admin or user.is_internal):
self.check_allowed(corpus=element.corpus, **validated_data)
metadata = element.metadatas.using('default').filter(name=validated_data['name'])
max_index = metadata.aggregate(Max('index'))['index__max']
# max_index will be None if there are no metadata.
# If there are any metadata, check that we are not creating a duplicate
if max_index is not None:
try:
existing_metadata = metadata.filter(value=validated_data['value'], type=validated_data['type']).get()
except MetaData.DoesNotExist:
# No existing metadata found: carry on!
pass
except MetaData.MultipleObjectsReturned:
# In theory, we should have a unique constraint on element_id/name/type/value, but that isn't possible
# because the values can get very long and exceed the size of a B-Tree index tuple (8/3 kibibytes).
# The APIs have unicity checks in place so this should never happen, but should is not enough.
# If there are any duplicate metadatas, we just let the exception escalate to Sentry so that we can troubleshoot.
raise
else:
# Send the existing ID in an HTTP 400 error
raise ValidationError({
'detail': ['This metadata already exists.'],
'id': str(existing_metadata.id),
})
# Increment the metadata index
validated_data['index'] = max_index + 1
try:
existing_metadata = element.metadatas.using('default').filter(
name=validated_data['name'],
type=validated_data['type'],
value=validated_data['value'],
).get()
except MetaData.DoesNotExist:
# No existing metadata found: carry on!
pass
except MetaData.MultipleObjectsReturned:
# In theory, we should have a unique constraint on element_id/name/type/value, but that isn't possible
# because the values can get very long and exceed the size of a B-Tree index tuple (8/3 kibibytes).
# The APIs have unicity checks in place so this should never happen, but should is not enough.
# If there are any duplicate metadatas, we just let the exception escalate to Sentry so that we can troubleshoot.
raise
else:
# Send the existing ID in an HTTP 400 error
raise ValidationError({
'detail': ['This metadata already exists.'],
'id': str(existing_metadata.id),
})
return element.metadatas.create(**validated_data)
......@@ -189,12 +183,6 @@ class MetaDataLightSerializer(serializers.ModelSerializer):
# Assert the new values is part of AllowedMetaData
# Still provide the instance so that the check can rely on the original values for partial updates
self.check_allowed(corpus=element.corpus, instance=instance, **validated_data)
new_name = validated_data.get('name') or instance.name
metadata = element.metadatas \
.filter(name=new_name) \
.exclude(id=instance.id)
if metadata.exists():
validated_data['index'] = metadata.aggregate(Max('index'))['index__max'] + 1
return super().update(instance, validated_data)
......
......@@ -77,7 +77,6 @@ class TestExport(FixtureTestCase):
type=MetaType.Numeric,
name='Score',
value=1,
index=0,
worker_version=metadata_version,
)
......
......@@ -452,7 +452,7 @@ class TestChildrenElements(FixtureAPITestCase):
element = self.corpus.elements.create(type=element_type, name=f'Element {i}')
element.add_parent(self.vol)
element.metadatas.create(name='number', type=MetaType.Numeric, value=str(i))
element.metadatas.create(name='number', type=MetaType.Text, value='a', index=1)
element.metadatas.create(name='number', type=MetaType.Text, value='a')
# operator, value, expected element names
parameters = [
......
......@@ -493,7 +493,7 @@ class TestListElements(FixtureAPITestCase):
for i in range(1, 6):
element = self.corpus.elements.create(type=element_type, name=f'Element {i}')
element.metadatas.create(name='number', type=MetaType.Numeric, value=str(i))
element.metadatas.create(name='number', type=MetaType.Text, value='a', index=1)
element.metadatas.create(name='number', type=MetaType.Text, value='a')
# operator, value, expected element names
parameters = [
......
......@@ -249,8 +249,6 @@ class TestMetaData(FixtureAPITestCase):
metadata = self.vol.metadatas.filter(name='edition')
self.assertEqual(metadata.count(), 2)
self.assertEqual(metadata.get(type=MetaType.Date).index, 0)
self.assertEqual(metadata.get(type=MetaType.Text).index, 1)
def test_create_duplicated_metadata(self):
self.client.force_login(self.user)
......@@ -262,7 +260,7 @@ class TestMetaData(FixtureAPITestCase):
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
metadata_id = response.json()['id']
with self.assertNumQueries(7):
with self.assertNumQueries(6):
response = self.client.post(
reverse('api:element-metadata', kwargs={'pk': str(self.vol.id)}),
data={'type': 'date', 'name': 'edition', 'value': '1986-june'}
......@@ -282,10 +280,10 @@ class TestMetaData(FixtureAPITestCase):
with that by checking themselves before inserting. If we still find duplicate metadata in the database, the API
should fail to allow us to notice the issue and troubleshoot it.
"""
self.vol.metadatas.create(name='test', type=MetaType.Text, index=0, value='And again')
self.vol.metadatas.create(name='test', type=MetaType.Text, index=1, value='And again')
self.vol.metadatas.create(name='test', type=MetaType.Text, value='And again')
self.vol.metadatas.create(name='test', type=MetaType.Text, value='And again')
self.client.force_login(self.superuser)
with self.assertNumQueries(5):
with self.assertNumQueries(4):
with self.assertRaises(MetaData.MultipleObjectsReturned):
self.client.post(
reverse('api:element-metadata', kwargs={'pk': str(self.vol.id)}),
......@@ -512,7 +510,6 @@ class TestMetaData(FixtureAPITestCase):
def test_patch_to_existing_metadata(self):
metadata = self.vol.metadatas.create(type=MetaType.Location, name='location', value='111,222')
self.assertEqual(metadata.index, 0)
self.client.force_login(self.user)
response = self.client.patch(
reverse('api:metadata-edit', kwargs={'pk': str(metadata.id)}),
......@@ -520,7 +517,6 @@ class TestMetaData(FixtureAPITestCase):
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
metadata.refresh_from_db()
self.assertEqual(metadata.index, 1)
self.assertEqual(metadata.value, '333,444')
def test_patch_unallowed_metadata(self):
......
......@@ -269,7 +269,7 @@ class TestParentsElements(FixtureAPITestCase):
element = self.corpus.elements.create(type=element_type, name=f'Element {i}')
self.page.add_parent(element)
element.metadatas.create(name='number', type=MetaType.Numeric, value=str(i))
element.metadatas.create(name='number', type=MetaType.Text, value='a', index=1)
element.metadatas.create(name='number', type=MetaType.Text, value='a')
# operator, value, expected element names
parameters = [
......
......@@ -596,6 +596,13 @@ if DEBUG and not TEST_ENV:
try:
import django_extensions # noqa
INSTALLED_APPS.append('django_extensions')
# With disable_existing_loggers=True, Django's logging config causes manage.py runserver_plus --print-sql
# to no longer be able to log SQL queries.
# https://github.com/django-extensions/django-extensions/issues/1626#issuecomment-774698668
LOGGING['loggers']['django_extensions.management.commands.runserver_plus'] = {
'handlers': ['console'],
'level': 'INFO',
}
except ImportError:
pass
......@@ -609,6 +616,7 @@ except ImportError:
if SENTRY_DSN and not DJANGO_SHELL:
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.rq import RqIntegration
from sentry_sdk.integrations.logging import ignore_logger
ignore_logger('django.security.DisallowedHost')
......@@ -618,5 +626,8 @@ if SENTRY_DSN and not DJANGO_SHELL:
release=f'arkindex-backend@{VERSION}',
debug=DEBUG,
send_default_pii=True,
integrations=[DjangoIntegration()],
integrations=[
DjangoIntegration(),
RqIntegration(),
],
)
......@@ -90,14 +90,13 @@ SELECT "documents_metadata"."id",
"documents_metadata"."name",
"documents_metadata"."type",
"documents_metadata"."value",
"documents_metadata"."index",
"documents_metadata"."entity_id",
"documents_metadata"."worker_version_id"
FROM "documents_metadata"
WHERE "documents_metadata"."element_id" IN ('{page_id}'::uuid)
ORDER BY "documents_metadata"."element_id" ASC,
"documents_metadata"."name" ASC,
"documents_metadata"."index" ASC;
"documents_metadata"."id" ASC;
SELECT ("documents_transcriptionentity"."transcription_id") AS "_prefetch_related_val_transcription_id",
"documents_entity"."id",
......