Enforce image_size_cap on v2 upload

image_size_cap should be checked and enforced on upload.

Enforcement is in two places (sketched below the commit header):
- on image metadata save, where the declared image size is validated
- during image save to the backend store, where the bytes actually
  streamed are capped

Closes-Bug: 1315321
Change-Id: I45bfb360703617bc394e9e27fe17adf43b09c0e1
Co-Author: Manuel Desbonnet <manuel.desbonnet@hp.com>
Author:    Tom Leaman
Date:      2014-05-02 10:09:20 +00:00
Committer: Manuel Desbonnet
Commit:    92ab00fca6 (parent: f2e6001c3e)
6 changed files with 61 additions and 4 deletions
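In outline, the commit adds the same guard at two layers: the DB layer
rejects a declared size that is already over the cap, and the store layer
caps the bytes actually read from the client. A minimal sketch, using
names from the diffs below:

    # Sketch only: the two guards this commit introduces.

    # 1) Metadata save (ImageRepo.add/save): reject an image whose
    #    declared size already exceeds the cap.
    if image_values['size'] > CONF.image_size_cap:
        raise exception.ImageSizeLimitExceeded

    # 2) Backend store save: cap the bytes actually streamed, whatever
    #    size was declared, by wrapping the upload stream.
    data = utils.LimitingReader(utils.CooperativeReader(data),
                                CONF.image_size_cap)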

---- File 1 of 6 ----

@@ -27,6 +27,7 @@ from glance.openstack.common import importutils
 CONF = cfg.CONF
+CONF.import_opt('image_size_cap', 'glance.common.config')
 CONF.import_opt('metadata_encryption_key', 'glance.common.config')
@@ -150,6 +151,8 @@ class ImageRepo(object):
     def add(self, image):
         image_values = self._format_image_to_db(image)
+        if image_values['size'] > CONF.image_size_cap:
+            raise exception.ImageSizeLimitExceeded
         # the updated_at value is not set in the _format_image_to_db
         # function since it is specific to image create
         image_values['updated_at'] = image.updated_at
@@ -161,6 +164,8 @@ class ImageRepo(object):
     def save(self, image):
         image_values = self._format_image_to_db(image)
+        if image_values['size'] > CONF.image_size_cap:
+            raise exception.ImageSizeLimitExceeded
         try:
             new_values = self.db_api.image_update(self.context,
                                                   image.image_id,

---- File 2 of 6 ----

@@ -354,7 +354,10 @@ class ImageProxy(glance.domain.proxy.Image):
             size = 0  # NOTE(markwash): zero -> unknown size
         location, size, checksum, loc_meta = self.store_api.add_to_backend(
             self.context, CONF.default_store,
-            self.image.image_id, utils.CooperativeReader(data), size)
+            self.image.image_id,
+            utils.LimitingReader(utils.CooperativeReader(data),
+                                 CONF.image_size_cap),
+            size)
         self.image.locations = [{'url': location, 'metadata': loc_meta,
                                  'status': 'active'}]
         self.image.size = size
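The hunk above only applies the wrapper; the wrapper itself lives in
glance.common.utils. Roughly (a paraphrased sketch, not the verbatim
source), LimitingReader counts the bytes that pass through it and raises
ImageSizeLimitExceeded once the count crosses the cap:

    from glance.common import exception

    class LimitingReader(object):
        """Sketch: raise once more than `limit` bytes have been read."""

        def __init__(self, data, limit):
            self.data = data    # wrapped reader (CooperativeReader here)
            self.limit = limit  # CONF.image_size_cap
            self.bytes_read = 0

        def __iter__(self):
            for chunk in self.data:
                self.bytes_read += len(chunk)
                if self.bytes_read > self.limit:
                    raise exception.ImageSizeLimitExceeded()
                yield chunk

        def read(self, i):
            result = self.data.read(i)
            self.bytes_read += len(result)
            if self.bytes_read > self.limit:
                raise exception.ImageSizeLimitExceeded()
            return result

Because the check runs as the data is consumed, an oversized chunked
upload is rejected part-way through rather than trusted on its declared
size, which is what the functional test below observes as a 413.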

---- File 3 of 6 ----

@@ -279,6 +279,7 @@ class ApiServer(Server):
         self.pid_file = pid_file or os.path.join(self.test_dir, "api.pid")
         self.scrubber_datadir = os.path.join(self.test_dir, "scrubber")
         self.log_file = os.path.join(self.test_dir, "api.log")
+        self.image_size_cap = 1099511627776
         self.s3_store_host = "s3.amazonaws.com"
         self.s3_store_access_key = ""
         self.s3_store_secret_key = ""
@@ -342,6 +343,7 @@ metadata_encryption_key = %(metadata_encryption_key)s
 registry_host = 127.0.0.1
 registry_port = %(registry_port)s
 log_file = %(log_file)s
+image_size_cap = %(image_size_cap)d
 s3_store_host = %(s3_store_host)s
 s3_store_access_key = %(s3_store_access_key)s
 s3_store_secret_key = %(s3_store_secret_key)s
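The cap configured for the test API server is 1 TiB; a quick sanity
check of the constant:

    # 1099511627776 bytes is exactly 2**40, i.e. 1 TiB
    assert 1099511627776 == 2 ** 40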

---- File 4 of 6 ----

@@ -520,6 +520,48 @@ class TestImages(functional.FunctionalTest):
         self.stop_servers()
 
+    def test_image_size_cap(self):
+        self.api_server.image_size_cap = 128
+        self.start_servers(**self.__dict__.copy())
+
+        # create an image
+        path = self._url('/v2/images')
+        headers = self._headers({'content-type': 'application/json'})
+        data = jsonutils.dumps({'name': 'image-size-cap-test-image',
+                                'type': 'kernel', 'disk_format': 'aki',
+                                'container_format': 'aki'})
+        response = requests.post(path, headers=headers, data=data)
+        self.assertEqual(201, response.status_code)
+        image = jsonutils.loads(response.text)
+        image_id = image['id']
+
+        # try to populate it with oversized data
+        path = self._url('/v2/images/%s/file' % image_id)
+        headers = self._headers({'Content-Type': 'application/octet-stream'})
+
+        class StreamSim(object):
+            # Using a one-shot iterator to force chunked transfer in the PUT
+            # request
+            def __init__(self, size):
+                self.size = size
+
+            def __iter__(self):
+                yield 'Z' * self.size
+
+        response = requests.put(path, headers=headers, data=StreamSim(
+                                self.api_server.image_size_cap + 1))
+        self.assertEqual(413, response.status_code)
+
+        # hashlib.md5('Z'*129).hexdigest()
+        #     == '76522d28cb4418f12704dfa7acd6e7ee'
+        # If the image has this checksum, it means that the whole stream was
+        # accepted and written to the store, which should not be the case.
+        path = self._url('/v2/images/{0}'.format(image_id))
+        headers = self._headers({'content-type': 'application/json'})
+        response = requests.get(path, headers=headers)
+        image_checksum = jsonutils.loads(response.text).get('checksum')
+        self.assertNotEqual(image_checksum, '76522d28cb4418f12704dfa7acd6e7ee')
+
     def test_permissions(self):
         self.start_servers(**self.__dict__.copy())
 
         # Create an image that belongs to TENANT1
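The magic checksum in the new test is just the digest of the oversized
payload; it can be reproduced directly (this mirrors the comment in the
test above):

    import hashlib

    # 128-byte cap plus one extra byte of 'Z', per the test's comment
    print(hashlib.md5(b'Z' * 129).hexdigest())
    # 76522d28cb4418f12704dfa7acd6e7ee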

---- File 5 of 6 ----

@@ -124,8 +124,10 @@ class TestStoreImage(utils.BaseTestCase):
         self.stubs.Set(unit_test_utils.FakeStoreAPI, 'get_from_backend',
                        fake_get_from_backend)
 
-        self.assertEqual(image1.get_data().fd, 'ZZZ')
+        # This time, image1.get_data() returns the data wrapped in a
+        # LimitingReader|CooperativeReader pipeline, so peek under
+        # the hood of those objects to get at the underlying string.
+        self.assertEqual(image1.get_data().data.fd, 'ZZZ')
         image1.locations.pop(0)
         self.assertEqual(len(image1.locations), 1)
         image2.delete()

---- File 6 of 6 ----

@@ -160,7 +160,10 @@ class FakeStoreAPI(object):
             if image_id in location:
                 raise exception.Duplicate()
         if not size:
-            size = len(data.fd)
+            # 'data' is a string wrapped in a LimitingReader|CooperativeReader
+            # pipeline, so peek under the hood of those objects to get at the
+            # string itself.
+            size = len(data.data.fd)
         if (current_store_size + size) > store_max_size:
             raise exception.StorageFull()
         if context.user == USER2:
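Both test adjustments above unwrap the same two-layer pipeline. As an
illustration of why .data.fd reaches the raw string (a sketch; per the
diffs, LimitingReader keeps its wrapped reader in .data and the test's
CooperativeReader keeps its source in .fd):

    from glance.common import utils

    raw = 'ZZZ'
    reader = utils.LimitingReader(utils.CooperativeReader(raw), 128)

    # One level of unwrapping per wrapper: LimitingReader.data is the
    # CooperativeReader, and CooperativeReader.fd is what it was handed.
    assert reader.data.fd == raw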