diff --git a/glare/engine.py b/glare/engine.py
index 2f052f0..6ab18e4 100644
--- a/glare/engine.py
+++ b/glare/engine.py
@@ -316,10 +316,23 @@ class Engine(object):
             except Exception as e:
                 raise exception.BadRequest(message=str(e))
 
+            max_allowed_size = af.get_max_blob_size(field_name)
+            # Check if we want to upload to a folder (and not just to a Blob)
+            if blob_key is not None:
+                blobs_dict = getattr(af, field_name)
+                # blobs that are still saving have no size yet
+                overall_folder_size = sum(
+                    blob["size"] for blob in blobs_dict.values()
+                    if blob["size"] is not None)
+                max_folder_size_allowed = af.get_max_folder_size(
+                    field_name) - overall_folder_size  # always non-negative
+                max_allowed_size = min(max_allowed_size,
+                                       max_folder_size_allowed)
+
             default_store = af.get_default_store(
                 context, af, field_name, blob_key)
             location_uri, size, checksums = store_api.save_blob_to_store(
-                blob_id, fd, context, af.get_max_blob_size(field_name),
+                blob_id, fd, context, max_allowed_size,
                 store_type=default_store)
         except Exception:
             # if upload failed remove blob from db and storage
diff --git a/glare/objects/base.py b/glare/objects/base.py
index effa437..a4797f0 100644
--- a/glare/objects/base.py
+++ b/glare/objects/base.py
@@ -754,6 +754,15 @@ class BaseArtifact(base.VersionedObject):
         """
         return getattr(cls.fields[field_name], 'max_blob_size')
 
+    @classmethod
+    def get_max_folder_size(cls, field_name):
+        """Get the maximum allowed folder size in bytes.
+
+        :param field_name: folder (blob dict) field name
+        :return: maximum folder size in bytes
+        """
+        return getattr(cls.fields[field_name], 'max_folder_size')
+
     @classmethod
     def validate_upload_allowed(cls, af, field_name, blob_key=None):
         """Validate if given blob is ready for uploading.
diff --git a/glare/objects/meta/wrappers.py b/glare/objects/meta/wrappers.py
index 64b8e38..d42d7d0 100644
--- a/glare/objects/meta/wrappers.py
+++ b/glare/objects/meta/wrappers.py
@@ -24,7 +24,8 @@ FILTERS = (
     FILTER_EQ, FILTER_NEQ, FILTER_IN, FILTER_GT, FILTER_GTE, FILTER_LT,
     FILTER_LTE) = ('eq', 'neq', 'in', 'gt', 'gte', 'lt', 'lte')
 
-DEFAULT_MAX_BLOB_SIZE = 10485760
+DEFAULT_MAX_BLOB_SIZE = 10485760  # 10 Megabytes
+DEFAULT_MAX_FOLDER_SIZE = 2673868800  # 2550 Megabytes
 
 
 class Field(object):
@@ -246,11 +247,14 @@ class BlobField(Field):
 
 
 class FolderField(DictField):
-    def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE, **kwargs):
+    def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE,
+                 max_folder_size=DEFAULT_MAX_FOLDER_SIZE, **kwargs):
         super(FolderField, self).__init__(
             element_type=glare_fields.BlobFieldType, **kwargs)
         self.max_blob_size = int(max_blob_size)
+        self.max_folder_size = int(max_folder_size)
         self.field_props.append('max_blob_size')
+        self.field_props.append('max_folder_size')
 
 
 # Classes below added for backward compatibility. They shouldn't be used
diff --git a/glare/tests/sample_artifact.py b/glare/tests/sample_artifact.py
index bff9f7d..72b68a6 100644
--- a/glare/tests/sample_artifact.py
+++ b/glare/tests/sample_artifact.py
@@ -88,6 +88,7 @@ class SampleArtifact(base_artifact.BaseArtifact):
                        required_on_activate=False,
                        filter_ops=(wrappers.FILTER_EQ,)),
         'dict_of_blobs': Folder(required_on_activate=False,
+                                max_folder_size=2000,
                                 validators=[
                                     validators.MaxDictKeyLen(1000)]),
         'string_mutable': Field(fields.StringField,
diff --git a/glare/tests/unit/api/test_upload.py b/glare/tests/unit/api/test_upload.py
index 5e458d2..9dc1ef9 100644
--- a/glare/tests/unit/api/test_upload.py
+++ b/glare/tests/unit/api/test_upload.py
@@ -40,7 +40,7 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
         self.assertEqual(3, artifact['blob']['size'])
         self.assertEqual('active', artifact['blob']['status'])
 
-    def test_size_too_big(self):
+    def test_blob_size_too_big(self):
         # small blob size is limited by 10 bytes
         self.assertRaises(
             exc.RequestEntityTooLarge, self.controller.upload_blob,
@@ -116,6 +116,42 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
         self.assertEqual(3, artifact['dict_of_blobs']['blb2']['size'])
         self.assertEqual('active', artifact['dict_of_blobs']['blb2']['status'])
 
+    def test_upload_oversized_blob_dict(self):
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/a',
+            BytesIO(1800 * b'a'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(1800, artifact['dict_of_blobs']['a']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['a']['status'])
+
+        # upload a second blob that still fits into the folder
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/b',
+            BytesIO(199 * b'b'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(199, artifact['dict_of_blobs']['b']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['b']['status'])
+
+        # upload one more byte so the folder reaches 2000 bytes exactly
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/c',
+            BytesIO(b'c'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(1, artifact['dict_of_blobs']['c']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['c']['status'])
+
+        # any further upload would exceed the 2000-byte folder limit
+        self.assertRaises(
+            exc.RequestEntityTooLarge, self.controller.upload_blob,
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/d', BytesIO(b'd'), 'application/octet-stream')
+
     def test_existing_blob_dict_key(self):
         self.controller.upload_blob(
             self.req, 'sample_artifact', self.sample_artifact['id'],
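
Reviewer note: the following standalone sketch is not part of the patch. All
names in it except get_max_blob_size/get_max_folder_size are made up for
illustration; it simply mirrors the accounting the engine now performs before
handing the file descriptor to save_blob_to_store, and reproduces the
arithmetic exercised by test_upload_oversized_blob_dict.

# Minimal sketch of the folder-quota math introduced in glare/engine.py.
# max_upload_size is a hypothetical helper, not Glare API: it computes the
# largest upload a folder (blob dict) can still accept.

def max_upload_size(existing_blobs, max_blob_size, max_folder_size):
    # blobs still in 'saving' state have size None and are excluded,
    # exactly as in the patched engine code
    used = sum(blob["size"] for blob in existing_blobs.values()
               if blob["size"] is not None)
    # non-negative as long as earlier uploads obeyed the cap
    remaining = max_folder_size - used
    return min(max_blob_size, remaining)

# With the sample_artifact limits (max_folder_size=2000, default 10 MiB
# per-blob cap), a folder holding 1800 + 199 bytes accepts exactly one
# more byte, then nothing:
folder = {"a": {"size": 1800}, "b": {"size": 199}, "d": {"size": None}}
assert max_upload_size(folder, 10485760, 2000) == 1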