Enforce image_size_cap on v2 upload

image_size_cap should be checked and enforced on upload

Enforcement is in two places:
- on image metadata save
- during image save to backend store

(cherry picked from commit 92ab00fca6)
Conflicts:
	glance/location.py
	glance/tests/functional/v2/test_images.py

Closes-Bug: 1315321
Change-Id: I45bfb360703617bc394e9e27fe17adf43b09c0e1
Co-Authored-By: Manuel Desbonnet <manuel.desbonnet@hp.com>
This commit is contained in:
Tom Leaman 2014-05-02 10:09:20 +00:00 committed by Manuel Desbonnet
parent 7c5c7782c5
commit 31a4d1852a
6 changed files with 61 additions and 4 deletions

View File

@ -27,6 +27,7 @@ from glance.openstack.common import importutils
CONF = cfg.CONF
CONF.import_opt('image_size_cap', 'glance.common.config')
CONF.import_opt('metadata_encryption_key', 'glance.common.config')
@ -150,6 +151,8 @@ class ImageRepo(object):
    def add(self, image):
        image_values = self._format_image_to_db(image)
        if image_values['size'] > CONF.image_size_cap:
            raise exception.ImageSizeLimitExceeded
        # the updated_at value is not set in the _format_image_to_db
        # function since it is specific to image create
        image_values['updated_at'] = image.updated_at
@ -161,6 +164,8 @@ class ImageRepo(object):
    def save(self, image):
        image_values = self._format_image_to_db(image)
        if image_values['size'] > CONF.image_size_cap:
            raise exception.ImageSizeLimitExceeded
        try:
            new_values = self.db_api.image_update(self.context,
                                                  image.image_id,

View File

@ -721,7 +721,10 @@ class ImageProxy(glance.domain.proxy.Image):
            size = 0  # NOTE(markwash): zero -> unknown size
        location, size, checksum, loc_meta = self.store_api.add_to_backend(
            self.context, CONF.default_store,
            self.image.image_id,
            utils.LimitingReader(utils.CooperativeReader(data),
                                 CONF.image_size_cap),
            size)
        self.image.locations = [{'url': location, 'metadata': loc_meta}]
        self.image.size = size
        self.image.checksum = checksum

View File

@ -280,6 +280,7 @@ class ApiServer(Server):
        self.pid_file = pid_file or os.path.join(self.test_dir, "api.pid")
        self.scrubber_datadir = os.path.join(self.test_dir, "scrubber")
        self.log_file = os.path.join(self.test_dir, "api.log")
        self.image_size_cap = 1099511627776
        self.s3_store_host = "s3.amazonaws.com"
        self.s3_store_access_key = ""
        self.s3_store_secret_key = ""
@ -341,6 +342,7 @@ metadata_encryption_key = %(metadata_encryption_key)s
registry_host = 127.0.0.1
registry_port = %(registry_port)s
log_file = %(log_file)s
image_size_cap = %(image_size_cap)d
s3_store_host = %(s3_store_host)s
s3_store_access_key = %(s3_store_access_key)s
s3_store_secret_key = %(s3_store_secret_key)s

View File

@ -451,6 +451,48 @@ class TestImages(functional.FunctionalTest):
        self.stop_servers()
def test_image_size_cap(self):
    """Verify image_size_cap is enforced on v2 image data upload.

    Starts the API server with a tiny image_size_cap (128 bytes), creates
    an image record, then streams image_size_cap + 1 bytes to the image's
    file endpoint and expects HTTP 413 (Request Entity Too Large).
    Finally checks that the oversized payload was not fully written to
    the backend store.
    """
    self.api_server.image_size_cap = 128
    self.start_servers(**self.__dict__.copy())

    # create an image
    path = self._url('/v2/images')
    headers = self._headers({'content-type': 'application/json'})
    data = jsonutils.dumps({'name': 'image-size-cap-test-image',
                            'type': 'kernel', 'disk_format': 'aki',
                            'container_format': 'aki'})
    response = requests.post(path, headers=headers, data=data)
    self.assertEqual(201, response.status_code)
    image = jsonutils.loads(response.text)
    image_id = image['id']

    # try to populate it with oversized data
    path = self._url('/v2/images/%s/file' % image_id)
    headers = self._headers({'Content-Type': 'application/octet-stream'})

    class StreamSim(object):
        # Using a one-shot iterator to force chunked transfer in the PUT
        # request
        def __init__(self, size):
            self.size = size

        def __iter__(self):
            yield 'Z' * self.size

    # One byte over the cap must be rejected with 413.
    response = requests.put(path, headers=headers, data=StreamSim(
        self.api_server.image_size_cap + 1))
    self.assertEqual(413, response.status_code)

    # hashlib.md5('Z'*129).hexdigest()
    #     == '76522d28cb4418f12704dfa7acd6e7ee'
    # If the image has this checksum, it means that the whole stream was
    # accepted and written to the store, which should not be the case.
    path = self._url('/v2/images/{0}'.format(image_id))
    headers = self._headers({'content-type': 'application/json'})
    response = requests.get(path, headers=headers)
    image_checksum = jsonutils.loads(response.text).get('checksum')
    self.assertNotEqual(image_checksum, '76522d28cb4418f12704dfa7acd6e7ee')
    def test_permissions(self):
        # Create an image that belongs to TENANT1
        path = self._url('/v2/images')

View File

@ -119,8 +119,10 @@ class TestStoreImage(utils.BaseTestCase):
        self.stubs.Set(unit_test_utils.FakeStoreAPI, 'get_from_backend',
                       fake_get_from_backend)
        # This time, image1.get_data() returns the data wrapped in a
        # LimitingReader|CooperativeReader pipeline, so peeking under
        # the hood of those objects to get at the underlying string.
        self.assertEqual(image1.get_data().data.fd, 'ZZZ')
        image1.locations.pop(0)
        self.assertEqual(len(image1.locations), 1)
        image2.delete()

View File

@ -148,7 +148,10 @@ class FakeStoreAPI(object):
        if image_id in location:
            raise exception.Duplicate()
        if not size:
            # 'data' is a string wrapped in a LimitingReader|CooperativeReader
            # pipeline, so peek under the hood of those objects to get at the
            # string itself.
            size = len(data.data.fd)
        if (current_store_size + size) > store_max_size:
            raise exception.StorageFull()
        if context.user == USER2: