Skip to content
Snippets Groups Projects
Commit 15aa99e0 authored by ml bonhomme's avatar ml bonhomme :bee: Committed by Erwan Rouchet
Browse files

Human readable file max size error in file import

parent 4b21b147
No related branches found
No related tags found
1 merge request: !2218 — Human readable file max size error in file import
......@@ -71,6 +71,9 @@ class DataFileCreateSerializer(serializers.ModelSerializer):
"s3_put_url",
)
read_only_fields = ("id", "status", "s3_url", "s3_put_url")
extra_kwargs = {
"size": {"error_messages": {"max_value": "File size exceeds maximum limit of 2GB."}}
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
......
......@@ -74,6 +74,18 @@ class TestDataFileApi(FixtureAPITestCase):
["some text", 1, "http://s3/upload_put_url"]
)
@patch("arkindex.project.aws.s3.meta.client.generate_presigned_url")
def test_create_datafile_size_exceeded(self, s3_presigned_url_mock):
    """A DataFile whose declared size is over the limit is rejected with a readable error."""
    # Stub the S3 presigned-URL call so no real AWS request is made.
    s3_presigned_url_mock.return_value = "http://s3/upload_put_url"
    self.client.force_login(self.user)

    # ~3.1 GB, deliberately above the 2 GB maximum enforced by the serializer.
    payload = self.build_file_create_request(size=3147483647)
    resp = self.client.post(reverse("api:file-create"), payload)

    self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
    self.assertDictEqual(resp.json(), {"size": ["File size exceeds maximum limit of 2GB."]})
@patch("arkindex.project.aws.s3.Object")
def test_check_uploaded_datafile(self, s3_object):
s3_object().content_length = 42
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment