Commit 6e916183 authored by Erwan Rouchet, committed by Bastien Abadie

Make the maximum Artifact file size configurable

parent f231e359
Merge request: !1902 Vore Ponos server-side code
# Generated by Django 4.1.3 on 2023-01-09 09:52
from django.core.validators import MinValueValidator
from django.db import migrations, models
from ponos.models import artifact_max_size
from ponos.validators import MaxValueValidator


class Migration(migrations.Migration):
    dependencies = [
        ("ponos", "0033_task_shm_size"),
    ]

    operations = [
        migrations.AlterField(
            model_name="artifact",
            name="size",
            field=models.BigIntegerField(
                validators=[MinValueValidator(1), MaxValueValidator(artifact_max_size)]
            ),
        ),
    ]
@@ -10,12 +10,7 @@ from botocore.exceptions import ClientError
 from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 from django.conf import settings
 from django.core.exceptions import ValidationError
-from django.core.validators import (
-    MaxValueValidator,
-    MinLengthValidator,
-    MinValueValidator,
-    RegexValidator,
-)
+from django.core.validators import MinLengthValidator, MinValueValidator, RegexValidator
 from django.db import models, transaction
 from django.db.models import Q
 from django.urls import reverse
@@ -30,6 +25,7 @@ from ponos.fields import CommaSeparatedListField, StringDictField
 from ponos.keys import gen_nonce
 from ponos.managers import TaskManager
 from ponos.recipe import parse_recipe, recipe_depth
+from ponos.validators import MaxValueValidator
 
 # Maximum allowed time until an agent is considered inactive since last request
 AGENT_TIMEOUT = timedelta(
@@ -775,6 +771,19 @@ class Task(models.Model):
         return "[Logs were truncated]\n" + text
 
 
+def artifact_max_size():
+    """
+    AWS restricts uploads to 5GiB per PUT request,
+    but some S3 implementations might not have this restriction,
+    so we default to 5 GiB and allow overriding through the Django settings.
+
+    We use a function to allow this setting to change at runtime,
+    which makes unit testing a lot easier.
+    """
+    setting = getattr(settings, "PONOS_ARTIFACT_MAX_SIZE", None)
+    return setting if setting is not None else 5 * 1024**3
+
+
 class Artifact(models.Model):
     """
     A task Artifact (Json report, docker images, ML Models...)
@@ -786,8 +795,7 @@ class Artifact(models.Model):
     size = models.BigIntegerField(
         validators=[
             MinValueValidator(1),
-            # AWS restricts uploads to 5GiB per PUT request
-            MaxValueValidator(5 * 1024**3),
+            MaxValueValidator(artifact_max_size),
         ]
     )
     content_type = models.CharField(max_length=250, default="application/octet-stream")
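For illustration only (not part of this commit): given the getattr() call in artifact_max_size() above, the limit can be raised from a Django settings module. The 20 GiB value below is an arbitrary example.

# settings.py (hypothetical override): allow artifacts up to 20 GiB, e.g. for an
# S3-compatible backend that does not enforce AWS's 5 GiB per-PUT limit.
PONOS_ARTIFACT_MAX_SIZE = 20 * 1024**3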
from django.core import validators


class HiddenCallableValidatorMixin(object):
    """
    Implements a workaround for some issues with error messages in DRF
    and with drf-spectacular OpenAPI schema generation when the `limit_value`
    of any validator extending django.core.validators.BaseValidator is
    a callable. This rewrites `self.limit_value` as a property,
    which calls the original limit value when it is callable while making
    Django, DRF and Spectacular believe it isn't callable.

    https://github.com/encode/django-rest-framework/discussions/8833
    https://github.com/tfranzel/drf-spectacular/issues/913
    """

    def __init__(self, limit_value, message=None):
        self._limit_value = limit_value
        if message:
            self.message = message

    @property
    def limit_value(self):
        return self._limit_value() if callable(self._limit_value) else self._limit_value


class MaxValueValidator(HiddenCallableValidatorMixin, validators.MaxValueValidator):
    pass
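A quick sketch of what the mixin changes (illustrative, not part of the commit): when the limit is a callable such as artifact_max_size, the limit_value property exposes the resolved number, so Django, DRF and drf-spectacular never see a callable.

from ponos.validators import MaxValueValidator


def limit():
    return 5 * 1024**3


validator = MaxValueValidator(limit)
print(callable(validator.limit_value))  # False: the property already called limit()
print(validator.limit_value)            # 5368709120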
@@ -10,6 +10,7 @@ from cryptography.hazmat.primitives.asymmetric import ec
 from cryptography.hazmat.primitives.hashes import SHA256
 from cryptography.hazmat.primitives.kdf.hkdf import HKDF
 from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
+from django.conf import settings
 from django.contrib.auth.models import User
 from django.test import override_settings
 from django.urls import reverse
@@ -1012,18 +1013,28 @@ class TestAPI(APITestCase):
             resp.json(), {"path": ["An artifact with this path already exists"]}
         )
 
+    @override_settings()
     def test_create_artifact_size_limits(self):
         params = [
-            (-42, "Ensure this value is greater than or equal to 1."),
-            (0, "Ensure this value is greater than or equal to 1."),
+            (None, -42, "Ensure this value is greater than or equal to 1."),
+            (None, 0, "Ensure this value is greater than or equal to 1."),
             (
+                None,
                 5 * 1024**3 + 1,
                 "Ensure this value is less than or equal to 5368709120.",
             ),
+            (123456789000, -42, "Ensure this value is greater than or equal to 1."),
+            (123456789000, 0, "Ensure this value is greater than or equal to 1."),
+            (
+                123456789000,
+                987654321000,
+                "Ensure this value is less than or equal to 123456789000.",
+            ),
         ]
-        for size, expected_error in params:
+        for size_setting, size, expected_error in params:
             with self.subTest(size=size):
+                settings.PONOS_ARTIFACT_MAX_SIZE = size_setting
                 url = reverse("ponos:task-artifacts", args=[self.task1.id])
                 resp = self.client.post(
                     url,
@@ -1032,9 +1043,7 @@
                         "content_type": "text/plain",
                         "size": size,
                     },
-                    HTTP_AUTHORIZATION="Bearer {}".format(
-                        self.agent.token.access_token
-                    ),
+                    HTTP_AUTHORIZATION=f"Bearer {self.agent.token.access_token}",
                 )
                 self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
                 self.assertEqual(resp.json(), {"size": [expected_error]})
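A note on the test change above: the bare @override_settings() decorator makes Django install a temporary settings holder for the duration of the test, so the assignment to settings.PONOS_ARTIFACT_MAX_SIZE inside the loop is rolled back automatically when the test ends. A minimal sketch of that pattern, with illustrative names:

from django.conf import settings
from django.test import TestCase, override_settings


class ExampleSettingsTest(TestCase):
    @override_settings()  # no arguments: used only for the automatic cleanup
    def test_runtime_setting_change(self):
        # The assignment lands on the temporary settings holder...
        settings.PONOS_ARTIFACT_MAX_SIZE = 123456789000
        self.assertEqual(settings.PONOS_ARTIFACT_MAX_SIZE, 123456789000)
        # ...and is discarded once override_settings exits after the test.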