Add max_folder_size

While max_blob_size limits the size of one specific blob,
max_folder_size limits the total size of the whole folder
(blob dictionary). E.g. if max_folder_size = 100 MB, you can
upload 100 blobs of 1 MB each, or one big blob of 100 MB.

The default max folder size is 2550 megabytes (2673868800 bytes).

Change-Id: I845c493db14d5d1884ace29cbcf6b5707a48250a
Authored by Idan on 2017-07-02 09:29:59 +00:00; committed by Mike Fedosin
parent 81e259fac9
commit 010726cd47
5 changed files with 65 additions and 4 deletions
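
To make the new limit concrete before the diff: the folder budget is shared
by all blobs in the dict, so 100 uploads of 1 MB exhaust a 100 MB folder just
as a single 100 MB blob would. A minimal standalone sketch of that rule
(illustrative only, not Glare code; all names here are made up):

    MB = 1024 * 1024
    max_folder_size = 100 * MB
    folder = {}  # blob_key -> size in bytes

    def can_upload(size):
        # A new blob fits only if the folder total stays within the budget.
        return sum(folder.values()) + size <= max_folder_size

    for i in range(100):           # 100 blobs of 1 MB each fit exactly...
        assert can_upload(1 * MB)
        folder['blob_%d' % i] = 1 * MB

    assert not can_upload(1)       # ...after which even one byte is rejected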

--- File 1 of 5 ---

@@ -316,10 +316,21 @@ class Engine(object):
            except Exception as e:
                raise exception.BadRequest(message=str(e))
            max_allowed_size = af.get_max_blob_size(field_name)
+            # Check if we want to upload to a folder (and not just to a blob)
+            if blob_key is not None:
+                blobs_dict = getattr(af, field_name)
+                overall_folder_size = sum(blob["size"] for blob
+                                          in blobs_dict.values())
+                max_folder_size_allowed = af.get_max_folder_size(field_name) \
+                    - overall_folder_size  # always non-negative
+                max_allowed_size = min(max_allowed_size,
+                                       max_folder_size_allowed)
            default_store = af.get_default_store(
                context, af, field_name, blob_key)
            location_uri, size, checksums = store_api.save_blob_to_store(
-                blob_id, fd, context, af.get_max_blob_size(field_name),
+                blob_id, fd, context, max_allowed_size,
                store_type=default_store)
        except Exception:
            # if upload failed remove blob from db and storage
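
The hunk above caps an upload at the smaller of the per-blob limit and what
is left of the folder budget. A hedged sketch of that computation with
stand-in values (in Glare the two limits come from af.get_max_blob_size()
and af.get_max_folder_size(); everything else here is invented for
illustration):

    max_blob_size = 10 * 1024 * 1024                 # per-blob cap: 10 MB
    max_folder_size = 100 * 1024 * 1024              # folder cap: 100 MB
    blobs_dict = {'a': {'size': 95 * 1024 * 1024}}   # 95 MB already stored

    overall_folder_size = sum(blob['size'] for blob in blobs_dict.values())
    remaining = max_folder_size - overall_folder_size
    max_allowed_size = min(max_blob_size, remaining)

    # A single blob may normally be 10 MB, but only 5 MB of budget remains.
    assert max_allowed_size == 5 * 1024 * 1024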

--- File 2 of 5 ---

@@ -754,6 +754,15 @@ class BaseArtifact(base.VersionedObject):
        """
        return getattr(cls.fields[field_name], 'max_blob_size')

+    @classmethod
+    def get_max_folder_size(cls, field_name):
+        """Get the maximum allowed folder size in bytes.
+
+        :param field_name: folder (blob dict) field name
+        :return: maximum folder size in bytes
+        """
+        return getattr(cls.fields[field_name], 'max_folder_size')
+
    @classmethod
    def validate_upload_allowed(cls, af, field_name, blob_key=None):
        """Validate if given blob is ready for uploading.

--- File 3 of 5 ---

@@ -24,7 +24,8 @@ FILTERS = (
    FILTER_EQ, FILTER_NEQ, FILTER_IN, FILTER_GT, FILTER_GTE, FILTER_LT,
    FILTER_LTE) = ('eq', 'neq', 'in', 'gt', 'gte', 'lt', 'lte')

-DEFAULT_MAX_BLOB_SIZE = 10485760
+DEFAULT_MAX_BLOB_SIZE = 10485760  # 10 Megabytes
+DEFAULT_MAX_FOLDER_SIZE = 2673868800  # 2550 Megabytes


class Field(object):

@@ -246,11 +247,14 @@ class BlobField(Field):

class FolderField(DictField):

-    def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE, **kwargs):
+    def __init__(self, max_blob_size=DEFAULT_MAX_BLOB_SIZE,
+                 max_folder_size=DEFAULT_MAX_FOLDER_SIZE, **kwargs):
        super(FolderField, self).__init__(
            element_type=glare_fields.BlobFieldType, **kwargs)
        self.max_blob_size = int(max_blob_size)
+        self.max_folder_size = int(max_folder_size)
        self.field_props.append('max_blob_size')
+        self.field_props.append('max_folder_size')


# Classes below added for backward compatibility. They shouldn't be used
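
Both defaults are mebibyte-based, which a quick check confirms:

    MB = 1024 * 1024
    assert 10 * MB == 10485760        # DEFAULT_MAX_BLOB_SIZE
    assert 2550 * MB == 2673868800    # DEFAULT_MAX_FOLDER_SIZE

Per-type overrides remain possible: the sample artifact in the next file
passes max_folder_size=2000 so the tests can hit the limit with tiny uploads.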

--- File 4 of 5 ---

@@ -88,6 +88,7 @@ class SampleArtifact(base_artifact.BaseArtifact):
                                required_on_activate=False,
                                filter_ops=(wrappers.FILTER_EQ,)),
        'dict_of_blobs': Folder(required_on_activate=False,
+                               max_folder_size=2000,
                                validators=[
                                    validators.MaxDictKeyLen(1000)]),
        'string_mutable': Field(fields.StringField,

--- File 5 of 5 ---

@@ -40,7 +40,7 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
        self.assertEqual(3, artifact['blob']['size'])
        self.assertEqual('active', artifact['blob']['status'])

-    def test_size_too_big(self):
+    def test_blob_size_too_big(self):
        # small blob size is limited to 10 bytes
        self.assertRaises(
            exc.RequestEntityTooLarge, self.controller.upload_blob,

@@ -116,6 +116,42 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
        self.assertEqual(3, artifact['dict_of_blobs']['blb2']['size'])
        self.assertEqual('active', artifact['dict_of_blobs']['blb2']['status'])

+    def test_upload_oversized_blob_dict(self):
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/a',
+            BytesIO(1800 * b'a'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(1800, artifact['dict_of_blobs']['a']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['a']['status'])
+
+        # upload another blob
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/b',
+            BytesIO(199 * b'b'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(199, artifact['dict_of_blobs']['b']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['b']['status'])
+
+        # upload one more byte to reach the 2000-byte folder size exactly
+        self.controller.upload_blob(
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/c',
+            BytesIO(b'c'), 'application/octet-stream')
+        artifact = self.controller.show(self.req, 'sample_artifact',
+                                        self.sample_artifact['id'])
+        self.assertEqual(1, artifact['dict_of_blobs']['c']['size'])
+        self.assertEqual('active', artifact['dict_of_blobs']['c']['status'])
+
+        # any further upload exceeds the 2000-byte folder limit
+        self.assertRaises(
+            exc.RequestEntityTooLarge, self.controller.upload_blob,
+            self.req, 'sample_artifact', self.sample_artifact['id'],
+            'dict_of_blobs/d', BytesIO(b'd'), 'application/octet-stream')
+
    def test_existing_blob_dict_key(self):
        self.controller.upload_blob(
            self.req, 'sample_artifact', self.sample_artifact['id'],
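
The new test fills the sample artifact's 2000-byte folder exactly
(1800 + 199 + 1 = 2000), so the final 1-byte upload must overflow the budget
and raise RequestEntityTooLarge. A quick check of that arithmetic:

    max_folder_size = 2000    # set on 'dict_of_blobs' in the sample artifact
    uploads = [1800, 199, 1]
    assert sum(uploads) == max_folder_size       # budget consumed exactly
    assert sum(uploads) + 1 > max_folder_size    # any further upload overflows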