Add a status field to image locations -- domain and API changes
Add a status field to each of an image's location entries; a location's status can be 'active', 'pending_delete' or 'deleted'. With this status information the scrubber service can perform cleanup based on DB records rather than a dedicated queue file per image. This is the second part of the change and covers the DB API, the domain layer and the REST API.

Partially-Implements BP: image-location-status
Change-Id: I744679e2dadbaec099aef33d8c5a3fe4ecf96865
Signed-off-by: Zhi Yan Liu <zhiyanl@cn.ibm.com>
This commit is contained in:
parent b937751ff8
commit 66d24bb1a1
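For reference, each entry in an image's location list now carries a status alongside the URL and store metadata. A minimal sketch of the expected shape (illustrative values only; the 'id' key is an assumption about a persisted record and only exists once the entry has been saved to the database):

    # Hypothetical example of one image location entry after this change.
    # 'status' is one of 'active', 'pending_delete' or 'deleted'.
    location_entry = {
        'url': 'file:///var/lib/glance/images/5a3e...',
        'metadata': {},
        'status': 'active',
    }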
@@ -607,30 +607,26 @@ class Controller(controller.BaseController):
        self.notifier.info("image.prepare", redact_loc(image_meta))

        image_meta, location, loc_meta = upload_utils.upload_data_to_store(
        image_meta, location_data = upload_utils.upload_data_to_store(
            req, image_meta, image_data, store, self.notifier)

        self.notifier.info('image.upload', redact_loc(image_meta))

        return location, loc_meta
        return location_data

    def _activate(self, req, image_id, location, location_metadata=None,
                  from_state=None):
    def _activate(self, req, image_id, location_data, from_state=None):
        """
        Sets the image status to `active` and the image's location
        attribute.

        :param req: The WSGI/Webob Request object
        :param image_id: Opaque image identifier
        :param location: Location of where Glance stored this image
        :param location_metadata: a dictionary of storage specific information
        :param location_data: Location of where Glance stored this image
        """
        image_meta = {}
        image_meta['location'] = location
        image_meta['location'] = location_data['url']
        image_meta['status'] = 'active'
        if location_metadata:
            image_meta['location_data'] = [{'url': location,
                                            'metadata': location_metadata}]
        image_meta['location_data'] = [location_data]

        try:
            s = from_state
@@ -669,17 +665,11 @@ class Controller(controller.BaseController):

        :retval Mapping of updated image data
        """
        image_id = image_meta['id']
        # This is necessary because of a bug in Webob 1.0.2 - 1.0.7
        # See: https://bitbucket.org/ianb/webob/
        # issue/12/fix-for-issue-6-broke-chunked-transfer
        req.is_body_readable = True
        location, location_metadata = self._upload(req, image_meta)
        location_data = self._upload(req, image_meta)
        return self._activate(req,
                              image_id,
                              location,
                              location_metadata,
                              from_state='saving') if location else None
                              image_meta['id'],
                              location_data,
                              from_state='saving') if location_data else None

    def _get_size(self, context, image_meta, location):
        # retrieve the image size from remote store (if not provided)
@@ -736,7 +726,9 @@ class Controller(controller.BaseController):
            raise HTTPConflict(explanation=msg,
                               request=req,
                               content_type="text/plain")
        image_meta = self._activate(req, image_id, location)
        location_data = {'url': location, 'metadata': {},
                         'status': 'active'}
        image_meta = self._activate(req, image_id, location_data)
        return image_meta

    def _validate_image_for_activation(self, req, id, values):
@@ -1034,8 +1026,9 @@ class Controller(controller.BaseController):
                # to delete the image if the backend doesn't yet store it.
                # See https://bugs.launchpad.net/glance/+bug/747799
                if image['location']:
                    upload_utils.initiate_deletion(req, image['location'], id,
                                                   CONF.delayed_delete)
                for loc_data in image['location_data']:
                    if loc_data['status'] == 'active':
                        upload_utils.initiate_deletion(req, loc_data, id)
        except Exception:
            with excutils.save_and_reraise_exception():
                registry.update_image_metadata(req.context, id,
@@ -17,32 +17,29 @@ from oslo.config import cfg
import webob.exc

from glance.common import exception
from glance.common import store_utils
from glance.common import utils
import glance.db
from glance.openstack.common import excutils
import glance.openstack.common.log as logging
import glance.registry.client.v1.api as registry
import glance.store
import glance.store as store_api


CONF = cfg.CONF
LOG = logging.getLogger(__name__)


def initiate_deletion(req, location, id, delayed_delete=False):
def initiate_deletion(req, location_data, id):
    """
    Deletes image data from the backend store.
    Deletes image data from the location of backend store.

    :param req: The WSGI/Webob Request object
    :param location: URL to the image data in a data store
    :param image_id: Opaque image identifier
    :param delayed_delete: whether data deletion will be delayed
    :param location_data: Location to the image data in a data store
    :param id: Opaque image identifier
    """
    if delayed_delete:
        glance.store.schedule_delayed_delete_from_backend(req.context,
                                                          location, id)
    else:
        glance.store.safe_delete_from_backend(req.context, location, id)
    store_utils.delete_image_location_from_backend(req.context,
                                                   id, location_data)


def _kill(req, image_id):
@@ -89,15 +86,19 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
        if remaining is not None:
            image_data = utils.LimitingReader(image_data, remaining)

        (location,
        (uri,
         size,
         checksum,
         locations_metadata) = glance.store.store_add_to_backend(
         location_metadata) = store_api.store_add_to_backend(
             image_meta['id'],
             utils.CooperativeReader(image_data),
             image_meta['size'],
             store)

        location_data = {'url': uri,
                         'metadata': location_metadata,
                         'status': 'active'}

        try:
            # recheck the quota in case there were simultaneous uploads that
            # did not provide the size
@@ -107,8 +108,8 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
            with excutils.save_and_reraise_exception():
                LOG.info(_('Cleaning up %s after exceeding '
                           'the quota') % image_id)
                glance.store.safe_delete_from_backend(
                    location, req.context, image_meta['id'])
                store_utils.safe_delete_from_backend(
                    req.context, image_meta['id'], location_data)

        def _kill_mismatched(image_meta, attr, actual):
            supplied = image_meta.get(attr)
@@ -121,7 +122,7 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
                         'actual': actual})
                LOG.error(msg)
                safe_kill(req, image_id)
                initiate_deletion(req, location, image_id, CONF.delayed_delete)
                initiate_deletion(req, location_data, image_id)
                raise webob.exc.HTTPBadRequest(explanation=msg,
                                               content_type="text/plain",
                                               request=req)
@@ -153,10 +154,10 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
        # NOTE(jculp): we need to clean up the datastore if an image
        # resource is deleted while the image data is being uploaded
        #
        # We get "location" from above call to store.add(), any
        # We get "location_data" from above call to store.add(), any
        # exceptions that occur there handle this same issue internally,
        # Since this is store-agnostic, should apply to all stores.
        initiate_deletion(req, location, image_id, CONF.delayed_delete)
        initiate_deletion(req, location_data, image_id)
        raise webob.exc.HTTPPreconditionFailed(explanation=msg,
                                               request=req,
                                               content_type='text/plain')
@@ -248,4 +249,4 @@ def upload_data_to_store(req, image_meta, image_data, store, notifier):
                                           request=req,
                                           content_type='text/plain')

    return image_meta, location, locations_metadata
    return image_meta, location_data
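A rough illustration of how a caller now consumes the changed return value (a sketch only; everything other than upload_data_to_store is a stand-in): the helper returns the image metadata plus a single location_data dict instead of a separate (location, metadata) pair.

    image_meta, location_data = upload_utils.upload_data_to_store(
        req, image_meta, image_data, store, notifier)
    # The whole dict, including its 'active' status, is handed on
    # (e.g. to _activate()) rather than the bare URL.
    assert location_data['status'] == 'active'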
@@ -122,7 +122,7 @@ class ImagesController(object):
                    change_method(req, image, change)

            if changes:
                image_repo.save(image)
            image_repo.save(image)
        except exception.NotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.msg)
        except exception.Forbidden as e:
@@ -580,19 +580,30 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
        image_view['updated_at'] = timeutils.isotime(image.updated_at)

        if CONF.show_multiple_locations:
            if image.locations:
                image_view['locations'] = list(image.locations)
            locations = list(image.locations)
            if locations:
                image_view['locations'] = []
                for loc in locations:
                    tmp = dict(loc)
                    tmp.pop('id', None)
                    tmp.pop('status', None)
                    image_view['locations'].append(tmp)
            else:
                # NOTE (flwang): We will still show "locations": [] if
                # image.locations is None to indicate it's allowed to show
                # locations but it's just non-existent.
                image_view['locations'] = []
                LOG.debug("There is not available location "
                          "for image %s" % image.image_id)

        if CONF.show_image_direct_url and image.locations:
            # Choose best location configured strategy
            best_location = (
                location_strategy.choose_best_location(image.locations))
            image_view['direct_url'] = best_location['url']
        if CONF.show_image_direct_url:
            if image.locations:
                # Choose best location configured strategy
                l = location_strategy.choose_best_location(image.locations)
                image_view['direct_url'] = l['url']
            else:
                LOG.debug("There is not available location "
                          "for image %s" % image.image_id)

        image_view['tags'] = list(image.tags)
        image_view['self'] = self._get_image_href(image)
glance/common/store_utils.py (new file, 121 lines)
@@ -0,0 +1,121 @@
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import sys

from oslo.config import cfg

from glance.common import exception
from glance.common import utils
import glance.db as db_api
from glance.openstack.common import gettextutils
import glance.openstack.common.log as logging
from glance import scrubber
import glance.store as store_api

_LE = gettextutils._LE
_LW = gettextutils._LW

LOG = logging.getLogger(__name__)

store_utils_opts = [
    cfg.BoolOpt('use_user_token', default=True,
                help=_('Whether to pass through the user token when '
                       'making requests to the registry.')),
]

CONF = cfg.CONF
CONF.register_opts(store_utils_opts)


def safe_delete_from_backend(context, image_id, location):
    """
    Given a location, delete an image from the store and
    update location status to db.

    This function try to handle all known exceptions which might be raised
    by those calls on store and DB modules in its implementation.

    :param context: The request context
    :param image_id: The image identifier
    :param location: The image location entry
    """

    try:
        ret = store_api.delete_from_backend(context, location['url'])
        location['status'] = 'deleted'
        if 'id' in location:
            db_api.get_api().image_location_delete(context, image_id,
                                                   location['id'], 'deleted')
        return ret
    except exception.NotFound:
        msg = _LW('Failed to delete image %s in store from URI') % image_id
        LOG.warn(msg)
    except exception.StoreDeleteNotSupported as e:
        LOG.warn(utils.exception_to_str(e))
    except store_api.UnsupportedBackend:
        exc_type = sys.exc_info()[0].__name__
        msg = (_LE('Failed to delete image %(image_id)s from store: %(exc)s') %
               dict(image_id=image_id, exc=exc_type))
        LOG.error(msg)


def schedule_delayed_delete_from_backend(context, image_id, location):
    """
    Given a location, schedule the deletion of an image location and
    update location status to db.

    :param context: The request context
    :param image_id: The image identifier
    :param location: The image location entry
    """

    (file_queue, _db_queue) = scrubber.get_scrub_queues()
    if not CONF.use_user_token:
        context = None
    # TODO(zhiyan): using location status to do image scrub.
    ret = file_queue.add_location(image_id, location, user_context=context)
    if ret:
        location['status'] = 'pending_delete'
        if 'id' in location:
            # NOTE(zhiyan): New added image location entry will has no 'id'
            # field since it has not been saved to DB.
            db_api.get_api().image_location_delete(context, image_id,
                                                   location['id'],
                                                   'pending_delete')
        else:
            db_api.get_api().image_location_add(context, image_id, location)

    return ret


def delete_image_location_from_backend(context, image_id, location):
    """
    Given a location, immediately or schedule the deletion of an image
    location and update location status to db.

    :param context: The request context
    :param image_id: The image identifier
    :param location: The image location entry
    """

    deleted = False
    if CONF.delayed_delete:
        deleted = schedule_delayed_delete_from_backend(context,
                                                       image_id, location)
    if not deleted:
        # NOTE(zhiyan) If image metadata has not been saved to DB
        # such as uploading process failure then we can't use
        # location status mechanism to support image pending delete.
        safe_delete_from_backend(context, image_id, location)
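As a usage sketch of the new module (the context, image id and location record below are assumptions, not values from this change), delete_image_location_from_backend picks delayed or immediate deletion based on CONF.delayed_delete and falls back to an immediate delete if queueing fails:

    # Hypothetical call site; 'ctx' and 'loc' stand in for a real request
    # context and a persisted location record.
    loc = {'id': 42, 'url': 'file:///var/lib/glance/images/abc',
           'metadata': {}, 'status': 'active'}
    store_utils.delete_image_location_from_backend(ctx, image_id, loc)
    # With delayed_delete enabled the record is queued for the scrubber and
    # moves to 'pending_delete'; otherwise the data is removed right away and
    # the record is marked 'deleted'.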
@@ -90,14 +90,12 @@ class ImageRepo(object):
            # NOTE(markwash) db api requires us to filter deleted
            if not prop['deleted']:
                properties[prop['name']] = prop['value']
        locations = db_image['locations']
        locations = [loc for loc in db_image['locations']
                     if loc['status'] == 'active']
        if CONF.metadata_encryption_key:
            key = CONF.metadata_encryption_key
            ld = []
            for l in locations:
                url = crypt.urlsafe_decrypt(key, l['url'])
                ld.append({'url': url, 'metadata': l['metadata']})
            locations = ld
                l['url'] = crypt.urlsafe_decrypt(key, l['url'])
        return glance.domain.Image(
            image_id=db_image['id'],
            name=db_image['name'],
@@ -124,9 +122,12 @@ class ImageRepo(object):
        if CONF.metadata_encryption_key:
            key = CONF.metadata_encryption_key
            ld = []
            for l in locations:
                url = crypt.urlsafe_encrypt(key, l['url'])
                ld.append({'url': url, 'metadata': l['metadata']})
            for loc in locations:
                url = crypt.urlsafe_encrypt(key, loc['url'])
                ld.append({'url': url, 'metadata': loc['metadata'],
                           'status': loc['status'],
                           # NOTE(zhiyan): New location has no ID field.
                           'id': loc.get('id')})
            locations = ld
        return {
            'id': image.image_id,
@@ -226,6 +226,13 @@ def image_tag_get_all(client, image_id, session=None):
    return client.image_tag_get_all(image_id=image_id)


@_get_client
def image_location_delete(client, image_id, location_id, status, session=None):
    """Delete an image location."""
    client.image_location_delete(image_id=image_id, location_id=location_id,
                                 status=status)


@_get_client
def user_get_storage_usage(client, owner_id, image_id=None, session=None):
    return client.user_get_storage_usage(owner_id=owner_id, image_id=image_id)
@@ -77,17 +77,18 @@ def _get_session():
    return DATA


def _image_locations_format(image_id, value, meta_data):
def _image_location_format(image_id, value, meta_data, status, deleted=False):
    dt = timeutils.utcnow()
    return {
        'id': str(uuid.uuid4()),
        'image_id': image_id,
        'created_at': dt,
        'updated_at': dt,
        'deleted_at': None,
        'deleted': False,
        'deleted_at': dt if deleted else None,
        'deleted': deleted,
        'url': value,
        'metadata': meta_data,
        'status': status,
    }


@@ -187,12 +188,14 @@ def _image_format(image_id, **values):

    locations = values.pop('locations', None)
    if locations is not None:
        locations = [
            _image_locations_format(image_id, location['url'],
                                    location['metadata'])
            for location in locations
        ]
        image['locations'] = locations
        image['locations'] = []
        for location in locations:
            location_ref = _image_location_format(image_id,
                                                  location['url'],
                                                  location['metadata'],
                                                  location['status'])
            image['locations'].append(location_ref)
            DATA['locations'].append(location_ref)

    #NOTE(bcwaldon): store properties as a list to match sqlalchemy driver
    properties = values.pop('properties', {})
@@ -325,8 +328,6 @@ def _sort_images(images, sort_key, sort_dir):
def _image_get(context, image_id, force_show_deleted=False, status=None):
    try:
        image = DATA['images'][image_id]
        image['locations'] = _image_location_get_all(image_id)
    except KeyError:
        LOG.info(_('Could not find image %s') % image_id)
        raise exception.NotFound()
@@ -345,8 +346,8 @@ def _image_get(context, image_id, force_show_deleted=False, status=None):
@log_call
def image_get(context, image_id, session=None, force_show_deleted=False):
    image = _image_get(context, image_id, force_show_deleted)
    image = _normalize_locations(image)
    return copy.deepcopy(image)
    return _normalize_locations(copy.deepcopy(image),
                                force_show_deleted=force_show_deleted)


@log_call
@@ -362,13 +363,15 @@ def image_get_all(context, filters=None, marker=None, limit=None,
    images = _do_pagination(context, images, marker, limit,
                            filters.get('deleted'))

    force_show_deleted = True if filters.get('deleted') else False
    res = []
    for image in images:
        image['locations'] = _image_location_get_all(image['id'])
        img = _normalize_locations(copy.deepcopy(image),
                                   force_show_deleted=force_show_deleted)
        if return_tag:
            image['tags'] = image_tag_get_all(context, image['id'])
        _normalize_locations(image)

    return images
            img['tags'] = image_tag_get_all(context, img['id'])
        res.append(img)
    return res


@log_call
@@ -382,7 +385,7 @@ def image_property_create(context, values):


@log_call
def image_property_delete(context, prop_ref, image_ref, session=None):
def image_property_delete(context, prop_ref, image_ref):
    prop = None
    for p in DATA['images'][image_ref]['properties']:
        if p['name'] == prop_ref:
@@ -467,42 +470,127 @@ def image_member_delete(context, member_id):
        raise exception.NotFound()


def _image_locations_set(image_id, locations):
    global DATA
@log_call
def image_location_add(context, image_id, location):
    deleted = location['status'] in ('deleted', 'pending_delete')
    location_ref = _image_location_format(image_id,
                                          value=location['url'],
                                          meta_data=location['metadata'],
                                          status=location['status'],
                                          deleted=deleted)
    DATA['locations'].append(location_ref)
    image = DATA['images'][image_id]
    for location in image['locations']:
        location['deleted'] = True
        location['deleted_at'] = timeutils.utcnow()
    image.setdefault('locations', []).append(location_ref)

    for i, location in enumerate(DATA['locations']):
        if image_id == location['image_id'] and location['deleted'] is False:

@log_call
def image_location_update(context, image_id, location):
    loc_id = location.get('id')
    if loc_id is None:
        msg = _("The location data has an invalid ID: %d") % loc_id
        raise exception.Invalid(msg)

    deleted = location['status'] in ('deleted', 'pending_delete')
    updated_time = timeutils.utcnow()
    delete_time = updated_time if deleted else None

    updated = False
    for loc in DATA['locations']:
        if (loc['id'] == loc_id and loc['image_id'] == image_id):
            loc.update({"value": location['url'],
                        "meta_data": location['metadata'],
                        "status": location['status'],
                        "deleted": deleted,
                        "updated_at": updated_time,
                        "deleted_at": delete_time})
            updated = True
            break

    if not updated:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=location_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)


@log_call
def image_location_delete(context, image_id, location_id, status,
                          delete_time=None):
    if status not in ('deleted', 'pending_delete'):
        msg = _("The status of deleted image location can only be set to "
                "'pending_delete' or 'deleted'.")
        raise exception.Invalid(msg)

    deleted = False
    for loc in DATA['locations']:
        if (loc['id'] == location_id and loc['image_id'] == image_id):
            deleted = True
            delete_time = delete_time or timeutils.utcnow()
            loc.update({"deleted": deleted,
                        "status": status,
                        "updated_at": delete_time,
                        "deleted_at": delete_time})
            break

    if not deleted:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=location_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)


def _image_locations_set(context, image_id, locations):
    # NOTE(zhiyan): 1. Remove records from DB for deleted locations
    used_loc_ids = [loc['id'] for loc in locations if loc.get('id')]
    image = DATA['images'][image_id]
    for loc in image['locations']:
        if loc['id'] not in used_loc_ids and not loc['deleted']:
            image_location_delete(context, image_id, loc['id'], 'deleted')
    for i, loc in enumerate(DATA['locations']):
        if (loc['image_id'] == image_id and loc['id'] not in used_loc_ids and
                not loc['deleted']):
            del DATA['locations'][i]

    for location in locations:
        location_ref = _image_locations_format(image_id, value=location['url'],
                                               meta_data=location['metadata'])
        DATA['locations'].append(location_ref)

        image['locations'].append(location_ref)
    # NOTE(zhiyan): 2. Adding or update locations
    for loc in locations:
        if loc.get('id') is None:
            image_location_add(context, image_id, loc)
        else:
            image_location_update(context, image_id, loc)


def _normalize_locations(image):
    undeleted_locations = filter(lambda x: not x['deleted'],
                                 image['locations'])
    image['locations'] = [{'url': loc['url'],
                           'metadata': loc['metadata']}
                          for loc in undeleted_locations]
def _image_locations_delete_all(context, image_id, delete_time=None):
    image = DATA['images'][image_id]
    for loc in image['locations']:
        if not loc['deleted']:
            image_location_delete(context, image_id, loc['id'], 'deleted',
                                  delete_time=delete_time)

    for i, loc in enumerate(DATA['locations']):
        if image_id == loc['image_id'] and loc['deleted'] == False:
            del DATA['locations'][i]


def _normalize_locations(image, force_show_deleted=False):
    """
    Generate suitable dictionary list for locations field of image.

    We don't need to set other data fields of location record which return
    from image query.
    """

    if force_show_deleted:
        locations = image['locations']
    else:
        locations = filter(lambda x: not x['deleted'], image['locations'])
    image['locations'] = [{'id': loc['id'],
                           'url': loc['url'],
                           'metadata': loc['metadata'],
                           'status': loc['status']}
                          for loc in locations]
    return image


def _image_location_get_all(image_id):
    location_data = []
    for location in DATA['locations']:
        if image_id == location['image_id']:
            location_data.append(location)
    return location_data


@log_call
def image_create(context, image_values):
    global DATA
@@ -527,11 +615,6 @@ def image_create(context, image_values):

    image = _image_format(image_id, **image_values)
    DATA['images'][image_id] = image

    location_data = image_values.get('locations')
    if location_data is not None:
        _image_locations_set(image_id, location_data)

    DATA['tags'][image_id] = image.pop('tags', [])

    return _normalize_locations(copy.deepcopy(image))
@@ -548,7 +631,7 @@ def image_update(context, image_id, image_values, purge_props=False,

    location_data = image_values.pop('locations', None)
    if location_data is not None:
        _image_locations_set(image_id, location_data)
        _image_locations_set(context, image_id, location_data)

    # replace values for properties that already exist
    new_properties = image_values.pop('properties', {})
@@ -567,15 +650,16 @@ def image_update(context, image_id, image_values, purge_props=False,
    image['updated_at'] = timeutils.utcnow()
    image.update(image_values)
    DATA['images'][image_id] = image
    return _normalize_locations(image)
    return _normalize_locations(copy.deepcopy(image))


@log_call
def image_destroy(context, image_id):
    global DATA
    try:
        delete_time = timeutils.utcnow()
        DATA['images'][image_id]['deleted'] = True
        DATA['images'][image_id]['deleted_at'] = timeutils.utcnow()
        DATA['images'][image_id]['deleted_at'] = delete_time

        # NOTE(flaper87): Move the image to one of the deleted statuses
        # if it hasn't been done yet.
@@ -583,7 +667,8 @@ def image_destroy(context, image_id):
                ['deleted', 'pending_delete']):
            DATA['images'][image_id]['status'] = 'deleted'

        _image_locations_set(image_id, [])
        _image_locations_delete_all(context, image_id,
                                    delete_time=delete_time)

        for prop in DATA['images'][image_id]['properties']:
            image_property_delete(context, prop['name'], image_id)
@@ -596,9 +681,7 @@ def image_destroy(context, image_id):
        for tag in tags:
            image_tag_delete(context, image_id, tag)

        _normalize_locations(DATA['images'][image_id])

        return copy.deepcopy(DATA['images'][image_id])
        return _normalize_locations(copy.deepcopy(DATA['images'][image_id]))
    except KeyError:
        raise exception.NotFound()

@@ -690,12 +773,12 @@ def user_get_storage_usage(context, owner_id, image_id=None, session=None):
    images = image_get_all(context, filters={'owner': owner_id})
    total = 0
    for image in images:
        if image['status'] in ['killed', 'pending_delete', 'deleted']:
        if image['status'] in ['killed', 'deleted']:
            continue

        if image['id'] != image_id:
            locations = [l for l in image['locations']
                         if not l.get('deleted', False)]
            locations = [loc for loc in image['locations']
                         if loc.get('status') != 'deleted']
            total += (image['size'] * len(locations))
    return total
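To make the simple-driver behaviour concrete, a hedged example of what _normalize_locations now returns for a record with one live and one deleted location (all values invented for illustration):

    import copy
    db_image = {'locations': [
        {'id': 1, 'url': 'file:///a', 'metadata': {},
         'status': 'active', 'deleted': False},
        {'id': 2, 'url': 'file:///b', 'metadata': {},
         'status': 'deleted', 'deleted': True},
    ]}
    _normalize_locations(copy.deepcopy(db_image))
    # -> locations == [{'id': 1, 'url': 'file:///a',
    #                   'metadata': {}, 'status': 'active'}]
    _normalize_locations(copy.deepcopy(db_image), force_show_deleted=True)
    # -> both entries kept, each reduced to id/url/metadata/status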
@@ -120,8 +120,7 @@ def image_destroy(context, image_id):
        image_ref.delete(session=session)
        delete_time = image_ref.deleted_at

        _image_locations_delete_all(context, image_ref.id, delete_time,
                                    session)
        _image_locations_delete_all(context, image_id, delete_time, session)

        _image_property_delete_all(context, image_id, delete_time, session)

@@ -132,11 +131,23 @@ def image_destroy(context, image_id):
    return _normalize_locations(image_ref)


def _normalize_locations(image):
    undeleted_locations = filter(lambda x: not x.deleted, image['locations'])
    image['locations'] = [{'url': loc['value'],
                           'metadata': loc['meta_data']}
                          for loc in undeleted_locations]
def _normalize_locations(image, force_show_deleted=False):
    """
    Generate suitable dictionary list for locations field of image.

    We don't need to set other data fields of location record which return
    from image query.
    """

    if force_show_deleted:
        locations = image['locations']
    else:
        locations = filter(lambda x: not x.deleted, image['locations'])
    image['locations'] = [{'id': loc['id'],
                           'url': loc['value'],
                           'metadata': loc['meta_data'],
                           'status': loc['status']}
                          for loc in locations]
    return image


@@ -149,7 +160,8 @@ def _normalize_tags(image):
def image_get(context, image_id, session=None, force_show_deleted=False):
    image = _image_get(context, image_id, session=session,
                       force_show_deleted=force_show_deleted)
    image = _normalize_locations(image.to_dict())
    image = _normalize_locations(image.to_dict(),
                                 force_show_deleted=force_show_deleted)
    return image


@@ -560,7 +572,8 @@ def image_get_all(context, filters=None, marker=None, limit=None,
    images = []
    for image in query.all():
        image_dict = image.to_dict()
        image_dict = _normalize_locations(image_dict)
        image_dict = _normalize_locations(image_dict,
                                          force_show_deleted=showing_deleted)
        if return_tag:
            image_dict = _normalize_tags(image_dict)
        images.append(image_dict)
@@ -583,14 +596,12 @@ def _image_get_disk_usage_by_owner(owner, session, image_id=None):
    if image_id is not None:
        query = query.filter(models.Image.id != image_id)
    query = query.filter(models.Image.size > 0)
    query = query.filter(~models.Image.status.in_(['killed',
                                                   'pending_delete',
                                                   'deleted']))
    query = query.filter(~models.Image.status.in_(['killed', 'deleted']))
    images = query.all()

    total = 0
    for i in images:
        locations = [l for l in i.locations if not l['deleted']]
        locations = [l for l in i.locations if l['status'] != 'deleted']
        total += (i.size * len(locations))
    return total

@@ -724,35 +735,118 @@ def _image_update(context, values, image_id, purge_props=False,
        _set_properties_for_image(context, image_ref, properties, purge_props,
                                  session)

    if location_data is not None:
        _image_locations_set(image_ref.id, location_data, session)
    if location_data is not None:
        _image_locations_set(context, image_ref.id, location_data,
                             session=session)

    return image_get(context, image_ref.id)


def _image_locations_set(image_id, locations, session):
    location_refs = session.query(models.ImageLocation)\
                           .filter_by(image_id=image_id)\
                           .filter_by(deleted=False)\
                           .all()
    for location_ref in location_refs:
        location_ref.delete(session=session)

    for location in locations:
        location_ref = models.ImageLocation(image_id=image_id,
                                            value=location['url'],
                                            meta_data=location['metadata'])
        location_ref.save()
def image_location_add(context, image_id, location, session=None):
    deleted = location['status'] in ('deleted', 'pending_delete')
    delete_time = timeutils.utcnow() if deleted else None
    location_ref = models.ImageLocation(image_id=image_id,
                                        value=location['url'],
                                        meta_data=location['metadata'],
                                        status=location['status'],
                                        deleted=deleted,
                                        deleted_at=delete_time)
    session = session or get_session()
    location_ref.save(session=session)


def _image_locations_delete_all(context, image_id, delete_time=None,
                                session=None):
def image_location_update(context, image_id, location, session=None):
    loc_id = location.get('id')
    if loc_id is None:
        msg = _("The location data has an invalid ID: %d") % loc_id
        raise exception.Invalid(msg)

    try:
        session = session or get_session()
        location_ref = session.query(models.ImageLocation)\
                              .filter_by(id=loc_id)\
                              .filter_by(image_id=image_id)\
                              .one()

        deleted = location['status'] in ('deleted', 'pending_delete')
        updated_time = timeutils.utcnow()
        delete_time = updated_time if deleted else None

        location_ref.update({"value": location['url'],
                             "meta_data": location['metadata'],
                             "status": location['status'],
                             "deleted": deleted,
                             "updated_at": updated_time,
                             "deleted_at": delete_time})
        location_ref.save(session=session)
    except sa_orm.exc.NoResultFound:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=location_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)


def image_location_delete(context, image_id, location_id, status,
                          delete_time=None, session=None):
    if status not in ('deleted', 'pending_delete'):
        msg = _("The status of deleted image location can only be set to "
                "'pending_delete' or 'deleted'")
        raise exception.Invalid(msg)

    try:
        session = session or get_session()
        location_ref = session.query(models.ImageLocation)\
                              .filter_by(id=location_id)\
                              .filter_by(image_id=image_id)\
                              .one()

        delete_time = delete_time or timeutils.utcnow()

        location_ref.update({"deleted": True,
                             "status": status,
                             "updated_at": delete_time,
                             "deleted_at": delete_time})
        location_ref.save(session=session)
    except sa_orm.exc.NoResultFound:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=location_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)


def _image_locations_set(context, image_id, locations, session=None):
    # NOTE(zhiyan): 1. Remove records from DB for deleted locations
    session = session or get_session()
    query = session.query(models.ImageLocation) \
        .filter_by(image_id=image_id) \
        .filter_by(deleted=False) \
        .filter(~models.ImageLocation.id.in_([loc['id']
                                              for loc in locations
                                              if loc.get('id')]))
    for loc_id in [loc_ref.id for loc_ref in query.all()]:
        image_location_delete(context, image_id, loc_id, 'deleted',
                              session=session)

    # NOTE(zhiyan): 2. Adding or update locations
    for loc in locations:
        if loc.get('id') is None:
            image_location_add(context, image_id, loc, session=session)
        else:
            image_location_update(context, image_id, loc, session=session)


def _image_locations_delete_all(context, image_id,
                                delete_time=None, session=None):
    """Delete all image locations for given image"""
    locs_updated_count = _image_child_entry_delete_all(models.ImageLocation,
                                                       image_id,
                                                       delete_time,
                                                       session)
    return locs_updated_count
    session = session or get_session()
    location_refs = session.query(models.ImageLocation) \
        .filter_by(image_id=image_id) \
        .filter_by(deleted=False) \
        .all()

    for loc_id in [loc_ref.id for loc_ref in location_refs]:
        image_location_delete(context, image_id, loc_id, 'deleted',
                              delete_time=delete_time, session=session)


def _set_properties_for_image(context, image_ref, properties,
@@ -18,6 +18,7 @@ from glance.api import authorization
from glance.api import policy
from glance.api import property_protections
from glance.common import property_utils
from glance.common import store_utils
import glance.db
import glance.domain
import glance.location
@@ -31,15 +32,16 @@ class Gateway(object):
                 policy_enforcer=None):
        self.db_api = db_api or glance.db.get_api()
        self.store_api = store_api or glance.store
        self.store_utils = store_utils
        self.notifier = notifier or glance.notifier.Notifier()
        self.policy = policy_enforcer or policy.Enforcer()

    def get_image_factory(self, context):
        image_factory = glance.domain.ImageFactory()
        store_image_factory = glance.location.ImageFactoryProxy(
            image_factory, context, self.store_api)
            image_factory, context, self.store_api, self.store_utils)
        quota_image_factory = glance.quota.ImageFactoryProxy(
            store_image_factory, context, self.db_api)
            store_image_factory, context, self.db_api, self.store_utils)
        policy_image_factory = policy.ImageFactoryProxy(
            quota_image_factory, context, self.policy)
        notifier_image_factory = glance.notifier.ImageFactoryProxy(
@@ -59,7 +61,7 @@ class Gateway(object):
    def get_image_member_factory(self, context):
        image_factory = glance.domain.ImageMemberFactory()
        quota_image_factory = glance.quota.ImageMemberFactoryProxy(
            image_factory, context, self.db_api)
            image_factory, context, self.db_api, self.store_utils)
        policy_member_factory = policy.ImageMemberFactoryProxy(
            quota_image_factory, context, self.policy)
        authorized_image_factory = authorization.ImageMemberFactoryProxy(
@@ -69,9 +71,9 @@ class Gateway(object):
    def get_repo(self, context):
        image_repo = glance.db.ImageRepo(context, self.db_api)
        store_image_repo = glance.location.ImageRepoProxy(
            image_repo, context, self.store_api)
            image_repo, context, self.store_api, self.store_utils)
        quota_image_repo = glance.quota.ImageRepoProxy(
            store_image_repo, context, self.db_api)
            store_image_repo, context, self.db_api, self.store_utils)
        policy_image_repo = policy.ImageRepoProxy(
            quota_image_repo, context, self.policy)
        notifier_image_repo = glance.notifier.ImageRepoProxy(
@@ -22,19 +22,23 @@ from glance.common import exception
from glance.common import utils
import glance.domain.proxy
from glance.openstack.common import excutils
from glance.openstack.common import gettextutils
import glance.openstack.common.log as logging
from glance import store

_LE = gettextutils._LE

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


class ImageRepoProxy(glance.domain.proxy.Repo):

    def __init__(self, image_repo, context, store_api):
    def __init__(self, image_repo, context, store_api, store_utils):
        self.context = context
        self.store_api = store_api
        proxy_kwargs = {'context': context, 'store_api': store_api}
        proxy_kwargs = {'context': context, 'store_api': store_api,
                        'store_utils': store_utils}
        super(ImageRepoProxy, self).__init__(image_repo,
                                             item_proxy_class=ImageProxy,
                                             item_proxy_kwargs=proxy_kwargs)
@@ -61,8 +65,7 @@ class ImageRepoProxy(glance.domain.proxy.Repo):


def _check_location_uri(context, store_api, uri):
    """
    Check if an image location uri is valid.
    """Check if an image location is valid.

    :param context: Glance request context
    :param store_api: store API module
@@ -88,7 +91,7 @@ def _check_image_location(context, store_api, location):
def _set_image_size(context, image, locations):
    if not image.size:
        for location in locations:
            size_from_backend = glance.store.get_size_from_backend(
            size_from_backend = store.get_size_from_backend(
                context, location['url'])
            if size_from_backend:
                # NOTE(flwang): This assumes all locations have the same size
@@ -96,23 +99,39 @@ def _set_image_size(context, image, locations):
                break


def _count_duplicated_locations(locations, new):
    """
    To calculate the count of duplicated locations for new one.

    :param locations: The exiting image location set
    :param new: The new image location
    :returns: The count of duplicated locations
    """

    ret = 0
    for loc in locations:
        if (loc['url'] == new['url'] and loc['metadata'] == new['metadata']):
            ret += 1
    return ret


class ImageFactoryProxy(glance.domain.proxy.ImageFactory):
    def __init__(self, factory, context, store_api):
    def __init__(self, factory, context, store_api, store_utils):
        self.context = context
        self.store_api = store_api
        proxy_kwargs = {'context': context, 'store_api': store_api}
        proxy_kwargs = {'context': context, 'store_api': store_api,
                        'store_utils': store_utils}
        super(ImageFactoryProxy, self).__init__(factory,
                                                proxy_class=ImageProxy,
                                                proxy_kwargs=proxy_kwargs)

    def new_image(self, **kwargs):
        locations = kwargs.get('locations', [])
        for l in locations:
            _check_image_location(self.context, self.store_api, l)

            if locations.count(l) > 1:
                raise exception.DuplicateLocation(location=l['url'])

        for loc in locations:
            _check_image_location(self.context, self.store_api, loc)
            loc['status'] = 'active'
            if _count_duplicated_locations(locations, loc) > 1:
                raise exception.DuplicateLocation(location=loc['url'])
        return super(ImageFactoryProxy, self).new_image(**kwargs)


@@ -148,8 +167,8 @@ class StoreLocations(collections.MutableSequence):
    def insert(self, i, location):
        _check_image_location(self.image_proxy.context,
                              self.image_proxy.store_api, location)

        if location in self.value:
        location['status'] = 'active'
        if _count_duplicated_locations(self.value, location) > 0:
            raise exception.DuplicateLocation(location=location['url'])

        self.value.insert(i, location)
@@ -160,10 +179,10 @@ class StoreLocations(collections.MutableSequence):
    def pop(self, i=-1):
        location = self.value.pop(i)
        try:
            store.delete_image_from_backend(self.image_proxy.context,
                                            self.image_proxy.store_api,
                                            self.image_proxy.image.image_id,
                                            location['url'])
            self.image_proxy.store_utils.delete_image_location_from_backend(
                self.image_proxy.context,
                self.image_proxy.image.image_id,
                location)
        except Exception:
            with excutils.save_and_reraise_exception():
                self.value.insert(i, location)
@@ -193,6 +212,7 @@ class StoreLocations(collections.MutableSequence):
    def __setitem__(self, i, location):
        _check_image_location(self.image_proxy.context,
                              self.image_proxy.store_api, location)
        location['status'] = 'active'
        self.value.__setitem__(i, location)
        _set_image_size(self.image_proxy.context,
                        self.image_proxy,
@@ -204,10 +224,10 @@ class StoreLocations(collections.MutableSequence):
            location = self.value.__getitem__(i)
        except Exception:
            return self.value.__delitem__(i)
        store.delete_image_from_backend(self.image_proxy.context,
                                        self.image_proxy.store_api,
                                        self.image_proxy.image.image_id,
                                        location['url'])
        self.image_proxy.store_utils.delete_image_location_from_backend(
            self.image_proxy.context,
            self.image_proxy.image.image_id,
            location)
        self.value.__delitem__(i)

    def __delslice__(self, i, j):
@@ -219,10 +239,10 @@ class StoreLocations(collections.MutableSequence):
        except Exception:
            return self.value.__delslice__(i, j)
        for location in locations:
            store.delete_image_from_backend(self.image_proxy.context,
                                            self.image_proxy.store_api,
                                            self.image_proxy.image.image_id,
                                            location['url'])
            self.image_proxy.store_utils.delete_image_location_from_backend(
                self.image_proxy.context,
                self.image_proxy.image.image_id,
                location)
            self.value.__delitem__(i)

    def __iadd__(self, other):
@@ -282,8 +302,8 @@ def _locations_proxy(target, attr):
        for location in value:
            _check_image_location(self.context, self.store_api,
                                  location)

            if value.count(location) > 1:
            location['status'] = 'active'
            if _count_duplicated_locations(value, location) > 1:
                raise exception.DuplicateLocation(location=location['url'])
        _set_image_size(self.context, getattr(self, target), value)
        return setattr(getattr(self, target), attr, list(value))
@@ -291,8 +311,10 @@ def _locations_proxy(target, attr):
    def del_attr(self):
        value = getattr(getattr(self, target), attr)
        while len(value):
            delete_image_from_backend(self.context, self.store_api,
                                      self.image.image_id, value[0]['url'])
            self.store_utils.delete_image_location_from_backend(
                self.context,
                self.image.image_id,
                value[0])
            del value[0]
        setattr(getattr(self, target), attr, value)
        return delattr(getattr(self, target), attr)
@@ -304,10 +326,11 @@ class ImageProxy(glance.domain.proxy.Image):

    locations = _locations_proxy('image', 'locations')

    def __init__(self, image, context, store_api):
    def __init__(self, image, context, store_api, store_utils):
        self.image = image
        self.context = context
        self.store_api = store_api
        self.store_utils = store_utils
        proxy_kwargs = {
            'context': context,
            'image': self,
@@ -321,10 +344,10 @@ class ImageProxy(glance.domain.proxy.Image):
        self.image.delete()
        if self.image.locations:
            for location in self.image.locations:
                self.store_api.delete_image_from_backend(self.context,
                                                         self.store_api,
                                                         self.image.image_id,
                                                         location['url'])
                self.store_utils.delete_image_location_from_backend(
                    self.context,
                    self.image.image_id,
                    location)

    def set_data(self, data, size=None):
        if size is None:
@@ -332,7 +355,8 @@ class ImageProxy(glance.domain.proxy.Image):
        location, size, checksum, loc_meta = self.store_api.add_to_backend(
            self.context, CONF.default_store,
            self.image.image_id, utils.CooperativeReader(data), size)
        self.image.locations = [{'url': location, 'metadata': loc_meta}]
        self.image.locations = [{'url': location, 'metadata': loc_meta,
                                 'status': 'active'}]
        self.image.size = size
        self.image.checksum = checksum
        self.image.status = 'active'
@@ -353,8 +377,8 @@ class ImageProxy(glance.domain.proxy.Image):
                          'err': utils.exception_to_str(e)})
                err = e
        # tried all locations
        LOG.error(_('Glance tried all locations to get data for image %s '
                    'but all have failed.') % self.image.image_id)
        LOG.error(_LE('Glance tried all active locations to get data for '
                      'image %s but all have failed.') % self.image.image_id)
        raise err


@@ -371,8 +395,8 @@ class ImageMemberRepoProxy(glance.domain.proxy.Repo):
        if self.image.locations and not public:
            member_ids = [m.member_id for m in self.repo.list()]
            for location in self.image.locations:
                self.store_api.set_acls(self.context, location['url'], public,
                                        read_tenants=member_ids)
                self.store_api.set_acls(self.context, location['url'],
                                        public, read_tenants=member_ids)

    def add(self, member):
        super(ImageMemberRepoProxy, self).add(member)
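A small hedged example of the duplicate check that replaces the plain list.count() comparison (values invented; the helper compares both URL and metadata):

    existing = [{'url': 'file:///a', 'metadata': {}, 'status': 'active'}]
    new_loc = {'url': 'file:///a', 'metadata': {}, 'status': 'active'}
    _count_duplicated_locations(existing, new_loc)
    # -> 1, so StoreLocations.insert() would raise DuplicateLocation for it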
@@ -78,10 +78,11 @@ def _enforce_image_location_quota(image, locations, is_setter=False):

class ImageRepoProxy(glance.domain.proxy.Repo):

    def __init__(self, image_repo, context, db_api):
    def __init__(self, image_repo, context, db_api, store_utils):
        self.image_repo = image_repo
        self.db_api = db_api
        proxy_kwargs = {'db_api': db_api, 'context': context}
        proxy_kwargs = {'context': context, 'db_api': db_api,
                        'store_utils': store_utils}
        super(ImageRepoProxy, self).__init__(image_repo,
                                             item_proxy_class=ImageProxy,
                                             item_proxy_kwargs=proxy_kwargs)
@@ -107,8 +108,9 @@ class ImageRepoProxy(glance.domain.proxy.Repo):


class ImageFactoryProxy(glance.domain.proxy.ImageFactory):
    def __init__(self, factory, context, db_api):
        proxy_kwargs = {'db_api': db_api, 'context': context}
    def __init__(self, factory, context, db_api, store_utils):
        proxy_kwargs = {'context': context, 'db_api': db_api,
                        'store_utils': store_utils}
        super(ImageFactoryProxy, self).__init__(factory,
                                                proxy_class=ImageProxy,
                                                proxy_kwargs=proxy_kwargs)
@@ -152,13 +154,15 @@ class QuotaImageTagsProxy(object):

class ImageMemberFactoryProxy(glance.domain.proxy.ImageMembershipFactory):

    def __init__(self, member_factory, context, db_api):
    def __init__(self, member_factory, context, db_api, store_utils):
        self.db_api = db_api
        self.context = context
        proxy_kwargs = {'context': context, 'db_api': db_api,
                        'store_utils': store_utils}
        super(ImageMemberFactoryProxy, self).__init__(
            member_factory,
            image_proxy_class=ImageProxy,
            image_proxy_kwargs={})
            image_proxy_kwargs=proxy_kwargs)

    def _enforce_image_member_quota(self, image):
        if CONF.image_member_quota < 0:
@@ -267,10 +271,11 @@ class QuotaImageLocationsProxy(object):

class ImageProxy(glance.domain.proxy.Image):

    def __init__(self, image, context, db_api):
    def __init__(self, image, context, db_api, store_utils):
        self.image = image
        self.context = context
        self.db_api = db_api
        self.store_utils = store_utils
        super(ImageProxy, self).__init__(image)

    def set_data(self, data, size=None):
@@ -317,9 +322,8 @@ class ImageProxy(glance.domain.proxy.Image):
            with excutils.save_and_reraise_exception():
                LOG.info(_('Cleaning up %s after exceeding the quota.')
                         % self.image.image_id)
                location = self.image.locations[0]['url']
                glance.store.safe_delete_from_backend(
                    self.context, location, self.image.image_id)
                self.store_utils.safe_delete_from_backend(
                    self.context, self.image.image_id, self.image.locations[0])

    @property
    def tags(self):
@@ -71,7 +71,10 @@ def _normalize_image_location_for_db(image_data):
    for l in locations:
        location_data_dict[l] = {}
    for l in location_data:
        location_data_dict[l['url']] = l['metadata']
        location_data_dict[l['url']] = {'metadata': l['metadata'],
                                        'status': l['status'],
                                        # Note(zhiyan): New location has no ID.
                                        'id': l['id'] if 'id' in l else None}

    # NOTE(jbresnah) preserve original order.  tests assume original order,
    # should that be defined functionality
@@ -79,8 +82,21 @@ def _normalize_image_location_for_db(image_data):
    for ld in location_data:
        if ld['url'] not in ordered_keys:
            ordered_keys.append(ld['url'])
    location_data = [{'url': l, 'metadata': location_data_dict[l]}
                     for l in ordered_keys]

    location_data = []
    for loc in ordered_keys:
        data = location_data_dict[loc]
        if data:
            location_data.append({'url': loc,
                                  'metadata': data['metadata'],
                                  'status': data['status'],
                                  'id': data['id']})
        else:
            location_data.append({'url': loc,
                                  'metadata': {},
                                  'status': 'active',
                                  'id': None})

    image_data['locations'] = location_data
    return image_data

@@ -492,11 +508,12 @@ class Controller(object):

def _limit_locations(image):
    locations = image.pop('locations', [])
    try:
        image['location'] = locations[0]['url']
    except IndexError:
        image['location'] = None
    image['location_data'] = locations
    image['location'] = None
    for loc in locations:
        if loc['status'] == 'active':
            image['location'] = loc['url']
            break


def make_image_dict(image):
@@ -58,7 +58,9 @@ class RegistryClient(BaseClient):
            for loc in image_metadata['location_data']:
                url = crypt.urlsafe_decrypt(self.metadata_encryption_key,
                                            loc['url'])
                ld.append({'url': url, 'metadata': loc['metadata']})
                ld.append({'id': loc['id'], 'url': url,
                           'metadata': loc['metadata'],
                           'status': loc['status']})
            image_metadata['location_data'] = ld
        return image_metadata

@@ -78,7 +80,10 @@ class RegistryClient(BaseClient):
                else:
                    url = crypt.urlsafe_encrypt(
                        self.metadata_encryption_key, loc['url'], 64)
                ld.append({'url': url, 'metadata': loc['metadata']})
                ld.append({'url': url, 'metadata': loc['metadata'],
                           'status': loc['status'],
                           # NOTE(zhiyan): New location has no ID field.
                           'id': loc.get('id')})
            image_metadata['location_data'] = ld
        return image_metadata
@ -68,12 +68,14 @@ class ScrubQueue(object):
|
||||
self.registry = registry.get_registry_client(context.RequestContext())
|
||||
|
||||
@abc.abstractmethod
|
||||
def add_location(self, image_id, uri, user_context=None):
|
||||
def add_location(self, image_id, location, user_context=None):
|
||||
"""Adding image location to scrub queue.
|
||||
|
||||
:param image_id: The opaque image identifier
|
||||
:param uri: The opaque image location uri
|
||||
:param location: The opaque image location
|
||||
:param user_context: The user's request context
|
||||
|
||||
:retval A boolean value to indicate success or not
|
||||
"""
|
||||
pass
|
||||
|
||||
@ -158,12 +160,14 @@ class ScrubFileQueue(ScrubQueue):
|
||||
except Exception:
|
||||
LOG.error(_("%s file can not be wrote.") % file_path)
|
||||
|
||||
def add_location(self, image_id, uri, user_context=None):
|
||||
def add_location(self, image_id, location, user_context=None):
|
||||
"""Adding image location to scrub queue.
|
||||
|
||||
:param image_id: The opaque image identifier
|
||||
:param uri: The opaque image location uri
|
||||
:param location: The opaque image location
|
||||
:param user_context: The user's request context
|
||||
|
||||
:retval A boolean value to indicate success or not
|
||||
"""
|
||||
if user_context is not None:
|
||||
registry_client = registry.get_registry_client(user_context)
|
||||
@ -179,18 +183,20 @@ class ScrubFileQueue(ScrubQueue):
|
||||
try:
|
||||
image = registry_client.get_image(image_id)
|
||||
if image['status'] == 'deleted':
|
||||
return
|
||||
return True
|
||||
except exception.NotFound as e:
|
||||
LOG.error(_("Failed to find image to delete: %s"),
|
||||
utils.exception_to_str(e))
|
||||
return
|
||||
return False
|
||||
|
||||
delete_time = time.time() + self.scrub_time
|
||||
file_path = os.path.join(self.scrubber_datadir, str(image_id))
|
||||
|
||||
if self.metadata_encryption_key is not None:
|
||||
uri = crypt.urlsafe_encrypt(self.metadata_encryption_key,
|
||||
uri, 64)
|
||||
location['url'], 64)
|
||||
else:
|
||||
uri = location['url']
|
||||
|
||||
if os.path.exists(file_path):
|
||||
# Append the uri of location to the queue file
|
||||
@ -204,6 +210,7 @@ class ScrubFileQueue(ScrubQueue):
|
||||
with open(file_path, 'w') as f:
|
||||
f.write('\n'.join([uri, str(int(delete_time))]))
|
||||
os.utime(file_path, (delete_time, delete_time))
|
||||
return True
|
||||
|
||||
def _walk_all_locations(self, remove=False):
|
||||
"""Returns a list of image id and location tuple from scrub queue.
|
||||
@ -276,12 +283,14 @@ class ScrubDBQueue(ScrubQueue):
|
||||
super(ScrubDBQueue, self).__init__()
|
||||
self.cleanup_scrubber_time = CONF.cleanup_scrubber_time
|
||||
|
||||
def add_location(self, image_id, uri, user_context=None):
|
||||
def add_location(self, image_id, location, user_context=None):
|
||||
"""Adding image location to scrub queue.
|
||||
|
||||
:param image_id: The opaque image identifier
|
||||
:param uri: The opaque image location uri
|
||||
:param location: The opaque image location
|
||||
:param user_context: The user's request context
|
||||
|
||||
:retval A boolean value to indicate success or not
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
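With this change every ScrubQueue implementation takes the full location record rather than a bare URI, so callers hand over the entry exactly as it is stored on the image, including its status. A hedged usage sketch follows; the queue object, context and values are placeholders, not code from this commit:

# Hypothetical caller-side sketch; file_queue and ctx stand in for objects
# that glance-api already holds at this point.
location = {
    'id': 42,                       # DB id of the location row; None for brand-new entries
    'url': 'file:///var/lib/glance/images/abc',
    'metadata': {},
    'status': 'pending_delete',     # the state the scrubber acts on
}
ok = file_queue.add_location('abc', location, user_context=ctx)
# ok is True when the location was queued, False otherwise (see the return
# values added to ScrubFileQueue.add_location above).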
@ -437,14 +446,15 @@ class Scrubber(object):
LOG.info(_("Scrubbing image %(id)s from %(count)d locations.") %
{'id': image_id, 'count': len(delete_jobs)})
# NOTE(bourke): The starmap must be iterated to do work
list(pool.starmap(self._delete_image_from_backend, delete_jobs))
list(pool.starmap(self._delete_image_location_from_backend,
delete_jobs))

image = self.registry.get_image(image_id)
if (image['status'] == 'pending_delete' and
not self.file_queue.has_image(image_id)):
self.registry.update_image(image_id, {'status': 'deleted'})

def _delete_image_from_backend(self, image_id, uri):
def _delete_image_location_from_backend(self, image_id, uri):
if CONF.metadata_encryption_key is not None:
uri = crypt.urlsafe_decrypt(CONF.metadata_encryption_key, uri)


@ -13,7 +13,6 @@
# License for the specific language governing permissions and limitations
# under the License.

import sys

from oslo.config import cfg
import six
@ -24,7 +23,6 @@ import glance.context
import glance.domain.proxy
from glance.openstack.common import importutils
import glance.openstack.common.log as logging
from glance import scrubber
from glance.store import location

LOG = logging.getLogger(__name__)
@ -41,20 +39,8 @@ store_opts = [
help=_("Default scheme to use to store image data. The "
"scheme must be registered by one of the stores "
"defined by the 'known_stores' config option.")),
cfg.StrOpt('scrubber_datadir',
default='/var/lib/glance/scrubber',
help=_('Directory that the scrubber will use to track '
'information about what to delete. '
'Make sure this is set in glance-api.conf and '
'glance-scrubber.conf.')),
cfg.BoolOpt('delayed_delete', default=False,
help=_('Turn on/off delayed delete.')),
cfg.BoolOpt('use_user_token', default=True,
help=_('Whether to pass through the user token when '
'making requests to the registry.')),
cfg.IntOpt('scrub_time', default=0,
help=_('The amount of time in seconds to delay before '
'performing a delete.')),
]

REGISTERED_STORES = set()
@ -318,42 +304,6 @@ def get_store_from_location(uri):
return loc.store_name


def safe_delete_from_backend(context, uri, image_id, **kwargs):
"""Given a uri, delete an image from the store."""
try:
return delete_from_backend(context, uri, **kwargs)
except exception.NotFound:
msg = _('Failed to delete image %s in store from URI')
LOG.warn(msg % image_id)
except exception.StoreDeleteNotSupported as e:
LOG.warn(utils.exception_to_str(e))
except UnsupportedBackend:
exc_type = sys.exc_info()[0].__name__
msg = (_('Failed to delete image %(image_id)s from store '
'(%(error)s)') % {'image_id': image_id,
'error': exc_type})
LOG.error(msg)


def schedule_delayed_delete_from_backend(context, uri, image_id, **kwargs):
"""Given a uri, schedule the deletion of an image location."""
(file_queue, _db_queue) = scrubber.get_scrub_queues()
# NOTE(zhiyan): Default ask glance-api store using file based queue.
# In future we can change it using DB based queued instead,
# such as using image location's status to saving pending delete flag
# when that property be added.
if CONF.use_user_token is False:
context = None
file_queue.add_location(image_id, uri, user_context=context)


def delete_image_from_backend(context, store_api, image_id, uri):
if CONF.delayed_delete:
store_api.schedule_delayed_delete_from_backend(context, uri, image_id)
else:
store_api.safe_delete_from_backend(context, uri, image_id)


def check_location_metadata(val, key=''):
if isinstance(val, dict):
for key in val:

@ -472,6 +472,7 @@ class RegistryServer(Server):
self.workers = 0
self.api_version = 1
self.user_storage_quota = '0'
self.metadata_encryption_key = "012345678901234567890123456789ab"

self.conf_base = """[DEFAULT]
verbose = %(verbose)s
@ -488,6 +489,7 @@ owner_is_tenant = %(owner_is_tenant)s
enable_v2_registry = %(enable_v2_registry)s
workers = %(workers)s
user_storage_quota = %(user_storage_quota)s
metadata_encryption_key = %(metadata_encryption_key)s
[paste_deploy]
flavor = %(deployment_flavor)s
"""

@ -53,7 +53,8 @@ def build_image_fixture(**kwargs):
'min_disk': 5,
'min_ram': 256,
'size': 19,
'locations': [{'url': "file:///tmp/glance-tests/2", 'metadata': {}}],
'locations': [{'url': "file:///tmp/glance-tests/2",
'metadata': {}, 'status': 'active'}],
'properties': {},
}
image.update(kwargs)
@ -169,22 +170,26 @@ class DriverTests(object):
self.context, {'id': UUID1, 'status': 'queued'})

def test_image_create_with_locations(self):
locations = [{'url': 'a', 'metadata': {}},
{'url': 'b', 'metadata': {}}]
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]

fixture = {'status': 'queued',
'locations': locations}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata']}
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(locations, actual)

def test_image_create_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'}},
{'url': 'b', 'metadata': {}}]
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
image = self.db_api.image_create(self.context, fixture)
actual = [{'url': l['url'], 'metadata': l['metadata']}
actual = [{'url': l['url'], 'metadata': l['metadata'],
'status': l['status']}
for l in image['locations']]
self.assertEqual(location_data, actual)

@ -208,17 +213,28 @@ class DriverTests(object):
self.assertNotEqual(image['created_at'], image['updated_at'])

def test_image_update_with_locations(self):
locations = [{'url': 'a', 'metadata': {}},
{'url': 'b', 'metadata': {}}]
locations = [{'url': 'a', 'metadata': {}, 'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': locations}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(locations, image['locations'])

def test_image_update_with_location_data(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'}},
{'url': 'b', 'metadata': {}}]
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]
fixture = {'locations': location_data}
image = self.db_api.image_update(self.adm_context, UUID3, fixture)
self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(location_data, image['locations'])

def test_image_update(self):
@ -762,8 +778,10 @@ class DriverTests(object):
self.assertEqual(image['tags'], expected_tags[image['id']])

def test_image_destroy(self):
location_data = [{'url': 'a', 'metadata': {'key': 'value'}},
{'url': 'b', 'metadata': {}}]
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {},
'status': 'active'}]
fixture = {'status': 'queued', 'locations': location_data}
image = self.db_api.image_create(self.context, fixture)
IMG_ID = image['id']
@ -775,6 +793,11 @@ class DriverTests(object):
member = self.db_api.image_member_create(self.context, fixture)
self.db_api.image_tag_create(self.context, IMG_ID, 'snarf')

self.assertEqual(2, len(image['locations']))
self.assertIn('id', image['locations'][0])
self.assertIn('id', image['locations'][1])
image['locations'][0].pop('id')
image['locations'][1].pop('id')
self.assertEqual(location_data, image['locations'])
self.assertEqual(('ping', 'pong', IMG_ID, False),
(prop['name'], prop['value'],
@ -808,8 +831,9 @@ class DriverTests(object):
elements of the image to be deleted.
"""
TENANT2 = str(uuid.uuid4())
location_data = [{'url': 'a', 'metadata': {'key': 'value'}},
{'url': 'b', 'metadata': {}}]
location_data = [{'url': 'a', 'metadata': {'key': 'value'},
'status': 'active'},
{'url': 'b', 'metadata': {}, 'status': 'active'}]

def _create_image_with_child_entries():
fixture = {'status': 'queued', 'locations': location_data}
@ -837,6 +861,11 @@ class DriverTests(object):
self.assertFalse(active_image['deleted'])
self.assertFalse(active_image['deleted_at'])

self.assertEqual(len(active_image['locations']), 2)
self.assertTrue('id' in active_image['locations'][0])
self.assertTrue('id' in active_image['locations'][1])
active_image['locations'][0].pop('id')
active_image['locations'][1].pop('id')
self.assertEqual(location_data, active_image['locations'])
self.assertEqual(1, len(active_image['properties']))
prop = active_image['properties'][0]
@ -1258,7 +1287,8 @@ class DriverQuotaTests(test_utils.BaseTestCase):
'owner': self.owner_id1}
new_fixture = build_image_fixture(**new_fixture_dict)
new_fixture['locations'].append({'url': 'file:///some/path/file',
'metadata': {}})
'metadata': {},
'status': 'active'})
self.db_api.image_create(self.context1, new_fixture)

total = reduce(lambda x, y: x + y,
@ -1278,7 +1308,8 @@ class DriverQuotaTests(test_utils.BaseTestCase):
'owner': self.owner_id1}
new_fixture = build_image_fixture(**new_fixture_dict)
new_fixture['locations'].append({'url': 'file:///some/path/file',
'metadata': {}})
'metadata': {},
'status': 'active'})
self.db_api.image_create(self.context1, new_fixture)

total = reduce(lambda x, y: x + y,

@ -140,13 +140,15 @@ class TestImageRepo(test_utils.BaseTestCase):
name='1', size=256,
is_public=True, status='active',
locations=[{'url': UUID1_LOCATION,
'metadata': UUID1_LOCATION_METADATA}]),
'metadata': UUID1_LOCATION_METADATA,
'status': 'active'}]),
_db_fixture(UUID2, owner=TENANT1, checksum=CHCKSUM1,
name='2', size=512, is_public=False),
_db_fixture(UUID3, owner=TENANT3, checksum=CHCKSUM1,
name='3', size=1024, is_public=True,
locations=[{'url': UUID3_LOCATION,
'metadata': {}}]),
'metadata': {},
'status': 'active'}]),
_db_fixture(UUID4, owner=TENANT4, name='4', size=2048),
]
[self.db.image_create(None, image) for image in self.images]
@ -359,8 +361,10 @@ class TestEncryptedLocations(test_utils.BaseTestCase):
self.image_factory = glance.domain.ImageFactory()
self.crypt_key = '0123456789abcdef'
self.config(metadata_encryption_key=self.crypt_key)
self.foo_bar_location = [{'url': 'foo', 'metadata': {}},
{'url': 'bar', 'metadata': {}}]
self.foo_bar_location = [{'url': 'foo', 'metadata': {},
'status': 'active'},
{'url': 'bar', 'metadata': {},
'status': 'active'}]

def test_encrypt_locations_on_add(self):
image = self.image_factory.new_image(UUID1)
@ -387,30 +391,40 @@ class TestEncryptedLocations(test_utils.BaseTestCase):

def test_decrypt_locations_on_get(self):
url_loc = ['ping', 'pong']
orig_locations = [{'url': l, 'metadata': {}} for l in url_loc]
orig_locations = [{'url': l, 'metadata': {}, 'status': 'active'}
for l in url_loc]
encrypted_locs = [crypt.urlsafe_encrypt(self.crypt_key, l)
for l in url_loc]
encrypted_locations = [{'url': l, 'metadata': {}}
encrypted_locations = [{'url': l, 'metadata': {}, 'status': 'active'}
for l in encrypted_locs]
self.assertNotEqual(encrypted_locations, orig_locations)
db_data = _db_fixture(UUID1, owner=TENANT1,
locations=encrypted_locations)
self.db.image_create(None, db_data)
image = self.image_repo.get(UUID1)
self.assertIn('id', image.locations[0])
self.assertIn('id', image.locations[1])
image.locations[0].pop('id')
image.locations[1].pop('id')
self.assertEqual(image.locations, orig_locations)

def test_decrypt_locations_on_list(self):
url_loc = ['ping', 'pong']
orig_locations = [{'url': l, 'metadata': {}} for l in url_loc]
orig_locations = [{'url': l, 'metadata': {}, 'status': 'active'}
for l in url_loc]
encrypted_locs = [crypt.urlsafe_encrypt(self.crypt_key, l)
for l in url_loc]
encrypted_locations = [{'url': l, 'metadata': {}}
encrypted_locations = [{'url': l, 'metadata': {}, 'status': 'active'}
for l in encrypted_locs]
self.assertNotEqual(encrypted_locations, orig_locations)
db_data = _db_fixture(UUID1, owner=TENANT1,
locations=encrypted_locations)
self.db.image_create(None, db_data)
image = self.image_repo.list()[0]
self.assertIn('id', image.locations[0])
self.assertIn('id', image.locations[1])
image.locations[0].pop('id')
image.locations[1].pop('id')
self.assertEqual(image.locations, orig_locations)

@ -17,6 +17,7 @@ from six.moves import xrange
import stubout

from glance.common import exception
from glance.common.store_utils import safe_delete_from_backend
from glance import context
from glance.db.sqlalchemy import api as db_api
from glance.registry.client.v1.api import configure_registry_client
@ -24,7 +25,6 @@ from glance.store import delete_from_backend
from glance.store.http import MAX_REDIRECTS
from glance.store.http import Store
from glance.store.location import get_location_from_uri
from glance.store import safe_delete_from_backend
from glance.tests import stubs as test_stubs
from glance.tests.unit import base
from glance.tests import utils
@ -181,10 +181,10 @@ class TestHttpStore(base.StoreClearingUnitTest):
delete_from_backend, ctx, uri)

def test_http_schedule_delete_swallows_error(self):
uri = "https://netloc/path/to/file.tar.gz"
uri = {"url": "https://netloc/path/to/file.tar.gz"}
ctx = context.RequestContext()
stub_out_registry_image_update(self.stubs)
try:
safe_delete_from_backend(ctx, uri, 'image_id')
safe_delete_from_backend(ctx, 'image_id', uri)
except exception.StoreDeleteNotSupported:
self.fail('StoreDeleteNotSupported should be swallowed')

@ -20,9 +20,9 @@ import uuid
import six

from glance.common import exception
from glance.common import store_utils
from glance.openstack.common import units
import glance.quota
import glance.store
from glance.tests.unit import utils as unit_test_utils
from glance.tests import utils as test_utils

@ -56,14 +56,16 @@ class TestImageQuota(test_utils.BaseTestCase):
def _get_image(self, location_count=1, image_size=10):
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = 'xyz'
base_image.size = image_size
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
locations = []
for i in range(location_count):
locations.append({'url': 'file:///g/there/it/is%d' % i,
'metadata': {}})
'metadata': {}, 'status': 'active'})
image_values = {'id': 'xyz', 'owner': context.owner,
'status': 'active', 'size': image_size,
'locations': locations}
@ -75,9 +77,11 @@ class TestImageQuota(test_utils.BaseTestCase):
self.config(user_storage_quota=str(quota))
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = 'id'
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
data = '*' * quota
base_image.set_data(data, size=None)
image.set_data(data)
@ -87,9 +91,11 @@ class TestImageQuota(test_utils.BaseTestCase):
self.config(user_storage_quota=config_quota)
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = 'id'
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
data = '*' * data_length
base_image.set_data(data, size=None)
image.set_data(data)
@ -115,16 +121,18 @@ class TestImageQuota(test_utils.BaseTestCase):
self.config(user_storage_quota=quota)
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = 'id'
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)

if deleted:
with patch.object(glance.store, 'safe_delete_from_backend'):
glance.store.safe_delete_from_backend(
with patch.object(store_utils, 'safe_delete_from_backend'):
store_utils.safe_delete_from_backend(
context,
base_image.locations[0]['url'],
image.image_id)
image.image_id,
base_image.locations[0])

self.assertRaises(exception.StorageQuotaFull,
image.set_data,
@ -168,7 +176,8 @@ class TestImageQuota(test_utils.BaseTestCase):
size=quota - 1)

def test_append_location(self):
new_location = {'url': 'file:///a/path', 'metadata': {}}
new_location = {'url': 'file:///a/path', 'metadata': {},
'status': 'active'}
image = self._get_image()
pre_add_locations = image.locations[:]
image.locations.append(new_location)
@ -176,7 +185,8 @@ class TestImageQuota(test_utils.BaseTestCase):
self.assertEqual(image.locations, pre_add_locations)

def test_insert_location(self):
new_location = {'url': 'file:///a/path', 'metadata': {}}
new_location = {'url': 'file:///a/path', 'metadata': {},
'status': 'active'}
image = self._get_image()
pre_add_locations = image.locations[:]
image.locations.insert(0, new_location)
@ -184,7 +194,8 @@ class TestImageQuota(test_utils.BaseTestCase):
self.assertEqual(image.locations, pre_add_locations)

def test_extend_location(self):
new_location = {'url': 'file:///a/path', 'metadata': {}}
new_location = {'url': 'file:///a/path', 'metadata': {},
'status': 'active'}
image = self._get_image()
pre_add_locations = image.locations[:]
image.locations.extend([new_location])
@ -192,7 +203,8 @@ class TestImageQuota(test_utils.BaseTestCase):
self.assertEqual(image.locations, pre_add_locations)

def test_iadd_location(self):
new_location = {'url': 'file:///a/path', 'metadata': {}}
new_location = {'url': 'file:///a/path', 'metadata': {},
'status': 'active'}
image = self._get_image()
pre_add_locations = image.locations[:]
image.locations += [new_location]
@ -200,7 +212,8 @@ class TestImageQuota(test_utils.BaseTestCase):
self.assertEqual(image.locations, pre_add_locations)

def test_set_location(self):
new_location = {'url': 'file:///a/path', 'metadata': {}}
new_location = {'url': 'file:///a/path', 'metadata': {},
'status': 'active'}
image = self._get_image()
image.locations = [new_location]
self.assertEqual(image.locations, [new_location])
@ -215,30 +228,36 @@ class TestImageQuota(test_utils.BaseTestCase):
image = self._make_image_with_quota()
self.assertRaises(exception.StorageQuotaFull,
image.locations.append,
{'url': 'file:///a/path', 'metadata': {}})
{'url': 'file:///a/path', 'metadata': {},
'status': 'active'})

def test_exceed_insert_location(self):
image = self._make_image_with_quota()
self.assertRaises(exception.StorageQuotaFull,
image.locations.insert,
0,
{'url': 'file:///a/path', 'metadata': {}})
{'url': 'file:///a/path', 'metadata': {},
'status': 'active'})

def test_exceed_extend_location(self):
image = self._make_image_with_quota()
self.assertRaises(exception.StorageQuotaFull,
image.locations.extend,
[{'url': 'file:///a/path', 'metadata': {}}])
[{'url': 'file:///a/path', 'metadata': {},
'status': 'active'}])

def test_set_location_under(self):
image = self._make_image_with_quota(location_count=1)
image.locations = [{'url': 'file:///a/path', 'metadata': {}}]
image.locations = [{'url': 'file:///a/path', 'metadata': {},
'status': 'active'}]

def test_set_location_exceed(self):
image = self._make_image_with_quota(location_count=1)
try:
image.locations = [{'url': 'file:///a/path', 'metadata': {}},
{'url': 'file:///a/path2', 'metadata': {}}]
image.locations = [{'url': 'file:///a/path', 'metadata': {},
'status': 'active'},
{'url': 'file:///a/path2', 'metadata': {},
'status': 'active'}]
self.fail('Should have raised the quota exception')
except exception.StorageQuotaFull:
pass
@ -246,7 +265,8 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_iadd_location_exceed(self):
image = self._make_image_with_quota(location_count=1)
try:
image.locations += [{'url': 'file:///a/path', 'metadata': {}}]
image.locations += [{'url': 'file:///a/path', 'metadata': {},
'status': 'active'}]
self.fail('Should have raised the quota exception')
except exception.StorageQuotaFull:
pass
@ -254,12 +274,14 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_append_location_for_queued_image(self):
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = str(uuid.uuid4())
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
self.assertIsNone(image.size)

self.stubs.Set(glance.store, 'get_size_from_backend',
self.stubs.Set(store_api, 'get_size_from_backend',
unit_test_utils.fake_get_size_from_backend)
image.locations.append({'url': 'file:///fake.img.tar.gz',
'metadata': {}})
@ -269,12 +291,14 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_insert_location_for_queued_image(self):
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = str(uuid.uuid4())
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
self.assertIsNone(image.size)

self.stubs.Set(glance.store, 'get_size_from_backend',
self.stubs.Set(store_api, 'get_size_from_backend',
unit_test_utils.fake_get_size_from_backend)
image.locations.insert(0,
{'url': 'file:///fake.img.tar.gz',
@ -285,12 +309,14 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_set_location_for_queued_image(self):
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = str(uuid.uuid4())
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
self.assertIsNone(image.size)

self.stubs.Set(glance.store, 'get_size_from_backend',
self.stubs.Set(store_api, 'get_size_from_backend',
unit_test_utils.fake_get_size_from_backend)
image.locations = [{'url': 'file:///fake.img.tar.gz', 'metadata': {}}]
self.assertEqual([{'url': 'file:///fake.img.tar.gz', 'metadata': {}}],
@ -299,12 +325,14 @@ class TestImageQuota(test_utils.BaseTestCase):
def test_iadd_location_for_queued_image(self):
context = FakeContext()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
base_image = FakeImage()
base_image.image_id = str(uuid.uuid4())
image = glance.quota.ImageProxy(base_image, context, db_api)
image = glance.quota.ImageProxy(base_image, context, db_api, store)
self.assertIsNone(image.size)

self.stubs.Set(glance.store, 'get_size_from_backend',
self.stubs.Set(store_api, 'get_size_from_backend',
unit_test_utils.fake_get_size_from_backend)
image.locations += [{'url': 'file:///fake.img.tar.gz', 'metadata': {}}]
self.assertIn({'url': 'file:///fake.img.tar.gz', 'metadata': {}},
@ -316,6 +344,7 @@ class TestImagePropertyQuotas(test_utils.BaseTestCase):
super(TestImagePropertyQuotas, self).setUp()
self.base_image = mock.Mock()
self.image = glance.quota.ImageProxy(self.base_image,
mock.Mock(),
mock.Mock(),
mock.Mock())

@ -324,6 +353,7 @@ class TestImagePropertyQuotas(test_utils.BaseTestCase):
self.image_repo_proxy = glance.quota.ImageRepoProxy(
self.image_repo_mock,
mock.Mock(),
mock.Mock(),
mock.Mock())

def test_save_image_with_image_property(self):
@ -381,6 +411,7 @@ class TestImageTagQuotas(test_utils.BaseTestCase):
self.base_image = mock.Mock()
self.base_image.tags = set([])
self.image = glance.quota.ImageProxy(self.base_image,
mock.Mock(),
mock.Mock(),
mock.Mock())

@ -388,6 +419,7 @@ class TestImageTagQuotas(test_utils.BaseTestCase):
self.image_repo_proxy = glance.quota.ImageRepoProxy(
self.image_repo_mock,
mock.Mock(),
mock.Mock(),
mock.Mock())

def test_replace_image_tag(self):
@ -478,12 +510,14 @@ class TestImageMemberQuotas(test_utils.BaseTestCase):
def setUp(self):
super(TestImageMemberQuotas, self).setUp()
db_api = unit_test_utils.FakeDB()
store_api = unit_test_utils.FakeStoreAPI()
store = unit_test_utils.FakeStoreUtils(store_api)
context = FakeContext()
self.image = mock.Mock()
self.base_image_member_factory = mock.Mock()
self.image_member_factory = glance.quota.ImageMemberFactoryProxy(
self.base_image_member_factory, context,
db_api)
db_api, store)

def test_new_image_member(self):
self.config(image_member_quota=1)
@ -516,6 +550,7 @@ class TestImageLocationQuotas(test_utils.BaseTestCase):
self.base_image.locations = []
self.base_image.size = 1
self.image = glance.quota.ImageProxy(self.base_image,
mock.Mock(),
mock.Mock(),
mock.Mock())

@ -523,6 +558,7 @@ class TestImageLocationQuotas(test_utils.BaseTestCase):
self.image_repo_proxy = glance.quota.ImageRepoProxy(
self.image_repo_mock,
mock.Mock(),
mock.Mock(),
mock.Mock())

def test_replace_image_location(self):

|
||||
class TestStoreImage(utils.BaseTestCase):
|
||||
def setUp(self):
|
||||
locations = [{'url': '%s/%s' % (BASE_URI, UUID1),
|
||||
'metadata': {}}]
|
||||
'metadata': {}, 'status': 'active'}]
|
||||
self.image_stub = ImageStub(UUID1, 'active', locations)
|
||||
self.store_api = unit_test_utils.FakeStoreAPI()
|
||||
self.store_utils = unit_test_utils.FakeStoreUtils(self.store_api)
|
||||
super(TestStoreImage, self).setUp()
|
||||
|
||||
def test_image_delete(self):
|
||||
image = glance.location.ImageProxy(self.image_stub, {}, self.store_api)
|
||||
image = glance.location.ImageProxy(self.image_stub, {},
|
||||
self.store_api, self.store_utils)
|
||||
location = image.locations[0]
|
||||
self.assertEqual(image.status, 'active')
|
||||
self.store_api.get_from_backend({}, location['url'])
|
||||
@ -98,7 +100,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
self.store_api.get_from_backend, {}, location['url'])
|
||||
|
||||
def test_image_get_data(self):
|
||||
image = glance.location.ImageProxy(self.image_stub, {}, self.store_api)
|
||||
image = glance.location.ImageProxy(self.image_stub, {},
|
||||
self.store_api, self.store_utils)
|
||||
self.assertEqual(image.get_data(), 'XXX')
|
||||
|
||||
def test_image_get_data_from_second_location(self):
|
||||
@ -109,7 +112,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
return self.data[location]
|
||||
|
||||
image1 = glance.location.ImageProxy(self.image_stub, {},
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
self.assertEqual(image1.get_data(), 'XXX')
|
||||
# Multiple location support
|
||||
context = glance.context.RequestContext(user=USER1)
|
||||
@ -131,7 +134,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
context = glance.context.RequestContext(user=USER1)
|
||||
image_stub = ImageStub(UUID2, status='queued', locations=[])
|
||||
image = glance.location.ImageProxy(image_stub, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
image.set_data('YYYY', 4)
|
||||
self.assertEqual(image.size, 4)
|
||||
#NOTE(markwash): FakeStore returns image_id for location
|
||||
@ -144,7 +147,9 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
image_stub = ImageStub(UUID2, status='queued', locations=[])
|
||||
loc_meta = {'key': 'value5032'}
|
||||
store_api = unit_test_utils.FakeStoreAPI(store_metadata=loc_meta)
|
||||
image = glance.location.ImageProxy(image_stub, context, store_api)
|
||||
store_utils = unit_test_utils.FakeStoreUtils(store_api)
|
||||
image = glance.location.ImageProxy(image_stub, context,
|
||||
store_api, store_utils)
|
||||
image.set_data('YYYY', 4)
|
||||
self.assertEqual(image.size, 4)
|
||||
location_data = image.locations[0]
|
||||
@ -161,7 +166,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
def test_image_set_data_unknown_size(self):
|
||||
context = glance.context.RequestContext(user=USER1)
|
||||
image_stub = ImageStub(UUID2, status='queued', locations=[])
|
||||
image = glance.location.ImageProxy(image_stub, context, self.store_api)
|
||||
image = glance.location.ImageProxy(image_stub, context,
|
||||
self.store_api, self.store_utils)
|
||||
image.set_data('YYYY', None)
|
||||
self.assertEqual(image.size, 4)
|
||||
#NOTE(markwash): FakeStore returns image_id for location
|
||||
@ -176,12 +182,12 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
|
||||
def _add_image(self, context, image_id, data, len):
|
||||
image_stub = ImageStub(image_id, status='queued', locations=[])
|
||||
image = glance.location.ImageProxy(image_stub,
|
||||
context, self.store_api)
|
||||
image = glance.location.ImageProxy(image_stub, context,
|
||||
self.store_api, self.store_utils)
|
||||
image.set_data(data, len)
|
||||
self.assertEqual(image.size, len)
|
||||
#NOTE(markwash): FakeStore returns image_id for location
|
||||
location = {'url': image_id, 'metadata': {}}
|
||||
location = {'url': image_id, 'metadata': {}, 'status': 'active'}
|
||||
self.assertEqual(image.locations, [location])
|
||||
self.assertEqual(image_stub.locations, [location])
|
||||
self.assertEqual(image.status, 'active')
|
||||
@ -235,8 +241,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
|
||||
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
|
||||
|
||||
image1.locations.append(location3)
|
||||
|
||||
@ -259,8 +265,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
|
||||
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
|
||||
|
||||
image1.locations.append(location3)
|
||||
|
||||
@ -324,8 +330,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
|
||||
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
|
||||
|
||||
image1.locations.extend([location3])
|
||||
|
||||
@ -348,8 +354,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
|
||||
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
|
||||
location_bad = {'url': 'unknown://location', 'metadata': {}}
|
||||
|
||||
image1.locations.extend([location3])
|
||||
@ -426,8 +432,8 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
location2 = {'url': UUID2, 'metadata': {}, 'status': 'active'}
|
||||
location3 = {'url': UUID3, 'metadata': {}, 'status': 'active'}
|
||||
|
||||
image1.locations.insert(0, location3)
|
||||
|
||||
@ -476,7 +482,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
context = glance.context.RequestContext(user=USER1)
|
||||
image_stub1 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image1 = glance.location.ImageProxy(image_stub1, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location_bad = {'url': 'unknown://location', 'metadata': {}}
|
||||
|
||||
@ -498,7 +504,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
|
||||
image_stub2 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image2 = glance.location.ImageProxy(image_stub2, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location_bad = {'url': UUID2, 'metadata': "a invalid metadata"}
|
||||
|
||||
@ -523,7 +529,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
|
||||
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image3 = glance.location.ImageProxy(image_stub3, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
@ -550,8 +556,9 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image1, image_stub1) = self._add_image(context, UUID2, 'XXXX', 4)
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
|
||||
image3 = glance.location.ImageProxy(image_stub3, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
@ -578,7 +585,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
(image2, image_stub2) = self._add_image(context, UUID3, 'YYYY', 4)
|
||||
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image3 = glance.location.ImageProxy(image_stub3, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
@ -607,7 +614,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
|
||||
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image3 = glance.location.ImageProxy(image_stub3, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
|
||||
location2 = {'url': UUID2, 'metadata': {}}
|
||||
location3 = {'url': UUID3, 'metadata': {}}
|
||||
@ -640,7 +647,7 @@ class TestStoreImage(utils.BaseTestCase):
|
||||
|
||||
image_stub3 = ImageStub('fake_image_id', status='queued', locations=[])
|
||||
image3 = glance.location.ImageProxy(image_stub3, context,
|
||||
self.store_api)
|
||||
self.store_api, self.store_utils)
|
||||
image3.locations += [location2, location3]
|
||||
|
||||
image_stub3.locations.reverse()
|
||||
@ -662,16 +669,20 @@ class TestStoreImageRepo(utils.BaseTestCase):
|
||||
def setUp(self):
|
||||
super(TestStoreImageRepo, self).setUp()
|
||||
self.store_api = unit_test_utils.FakeStoreAPI()
|
||||
store_utils = unit_test_utils.FakeStoreUtils(self.store_api)
|
||||
self.image_stub = ImageStub(UUID1)
|
||||
self.image = glance.location.ImageProxy(self.image_stub,
|
||||
{}, self.store_api)
|
||||
self.image = glance.location.ImageProxy(self.image_stub, {},
|
||||
self.store_api, store_utils)
|
||||
self.image_repo_stub = ImageRepoStub()
|
||||
self.image_repo = glance.location.ImageRepoProxy(self.image_repo_stub,
|
||||
{}, self.store_api)
|
||||
{}, self.store_api,
|
||||
store_utils)
|
||||
|
||||
def test_add_updates_acls(self):
|
||||
self.image_stub.locations = [{'url': 'foo', 'metadata': {}},
|
||||
{'url': 'bar', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'foo', 'metadata': {},
|
||||
'status': 'active'},
|
||||
{'url': 'bar', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_stub.visibility = 'public'
|
||||
self.image_repo.add(self.image)
|
||||
self.assertTrue(self.store_api.acls['foo']['public'])
|
||||
@ -688,12 +699,14 @@ class TestStoreImageRepo(utils.BaseTestCase):
|
||||
self.assertEqual(len(self.store_api.acls), 0)
|
||||
|
||||
def test_save_updates_acls(self):
|
||||
self.image_stub.locations = [{'url': 'foo', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'foo', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_repo.save(self.image)
|
||||
self.assertIn('foo', self.store_api.acls)
|
||||
|
||||
def test_add_fetches_members_if_private(self):
|
||||
self.image_stub.locations = [{'url': 'glue', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'glue', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_stub.visibility = 'private'
|
||||
self.image_repo.add(self.image)
|
||||
self.assertIn('glue', self.store_api.acls)
|
||||
@ -703,7 +716,8 @@ class TestStoreImageRepo(utils.BaseTestCase):
|
||||
self.assertEqual(acls['read'], [TENANT1, TENANT2])
|
||||
|
||||
def test_save_fetches_members_if_private(self):
|
||||
self.image_stub.locations = [{'url': 'glue', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'glue', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_stub.visibility = 'private'
|
||||
self.image_repo.save(self.image)
|
||||
self.assertIn('glue', self.store_api.acls)
|
||||
@ -713,7 +727,8 @@ class TestStoreImageRepo(utils.BaseTestCase):
|
||||
self.assertEqual(acls['read'], [TENANT1, TENANT2])
|
||||
|
||||
def test_member_addition_updates_acls(self):
|
||||
self.image_stub.locations = [{'url': 'glug', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'glug', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_stub.visibility = 'private'
|
||||
member_repo = self.image.get_member_repo()
|
||||
membership = glance.domain.ImageMembership(
|
||||
@ -726,7 +741,8 @@ class TestStoreImageRepo(utils.BaseTestCase):
|
||||
self.assertEqual(acls['read'], [TENANT1, TENANT2, TENANT3])
|
||||
|
||||
def test_member_removal_updates_acls(self):
|
||||
self.image_stub.locations = [{'url': 'glug', 'metadata': {}}]
|
||||
self.image_stub.locations = [{'url': 'glug', 'metadata': {},
|
||||
'status': 'active'}]
|
||||
self.image_stub.visibility = 'private'
|
||||
member_repo = self.image.get_member_repo()
|
||||
membership = glance.domain.ImageMembership(
|
||||
@ -743,10 +759,13 @@ class TestImageFactory(utils.BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestImageFactory, self).setUp()
|
||||
store_api = unit_test_utils.FakeStoreAPI()
|
||||
store_utils = unit_test_utils.FakeStoreUtils(store_api)
|
||||
self.image_factory = glance.location.ImageFactoryProxy(
|
||||
ImageFactoryStub(),
|
||||
glance.context.RequestContext(user=USER1),
|
||||
unit_test_utils.FakeStoreAPI())
|
||||
store_api,
|
||||
store_utils)
|
||||
|
||||
def test_new_image(self):
|
||||
image = self.image_factory.new_image()
|
||||
|
@ -70,7 +70,7 @@ class FakeDB(object):
|
||||
images = [
|
||||
{'id': UUID1, 'owner': TENANT1, 'status': 'queued',
|
||||
'locations': [{'url': '%s/%s' % (BASE_URI, UUID1),
|
||||
'metadata': {}}]},
|
||||
'metadata': {}, 'status': 'queued'}]},
|
||||
{'id': UUID2, 'owner': TENANT1, 'status': 'queued'},
|
||||
]
|
||||
[simple_db.image_create(None, image) for image in images]
|
||||
@ -91,6 +91,28 @@ class FakeDB(object):
|
||||
return getattr(simple_db, key)
|
||||
|
||||
|
||||
class FakeStoreUtils(object):
|
||||
def __init__(self, store_api):
|
||||
self.store_api = store_api
|
||||
|
||||
def safe_delete_from_backend(self, context, id, location):
|
||||
try:
|
||||
del self.store_api.data[location['url']]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def schedule_delayed_delete_from_backend(self, context, id, location):
|
||||
pass
|
||||
|
||||
def delete_image_location_from_backend(self, context,
|
||||
image_id, location):
|
||||
if CONF.delayed_delete:
|
||||
self.schedule_delayed_delete_from_backend(context, image_id,
|
||||
location)
|
||||
else:
|
||||
self.safe_delete_from_backend(context, image_id, location)
|
||||
|
||||
|
||||
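The FakeStoreUtils test double added above mirrors the real store_utils helpers: delete_image_location_from_backend() either schedules a delayed delete or removes the data immediately, depending on CONF.delayed_delete. A hedged sketch of how a test might drive it; the backend key and image id are invented, and delayed_delete is assumed to be off:

# Illustration only: uses the test doubles defined in glance.tests.unit.utils.
store_api = FakeStoreAPI()
store_api.data['file:///fake/img-1'] = ('Z' * 4, 4)    # pretend stored image data
store_utils = FakeStoreUtils(store_api)

location = {'url': 'file:///fake/img-1', 'metadata': {}, 'status': 'active'}
# With delayed_delete disabled the location is dropped from the backend at once.
store_utils.delete_image_location_from_backend({}, 'img-1', location)
assert 'file:///fake/img-1' not in store_api.data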
class FakeStoreAPI(object):
|
||||
def __init__(self, store_metadata=None):
|
||||
self.data = {
|
||||
@ -128,21 +150,6 @@ class FakeStoreAPI(object):
|
||||
except KeyError:
|
||||
raise exception.NotFound()
|
||||
|
||||
def safe_delete_from_backend(self, context, uri, id, **kwargs):
|
||||
try:
|
||||
del self.data[uri]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def schedule_delayed_delete_from_backend(self, context, uri, id, **kwargs):
|
||||
pass
|
||||
|
||||
def delete_image_from_backend(self, context, store_api, image_id, uri):
|
||||
if CONF.delayed_delete:
|
||||
self.schedule_delayed_delete_from_backend(context, uri, image_id)
|
||||
else:
|
||||
self.safe_delete_from_backend(context, uri, image_id)
|
||||
|
||||
def get_size_from_backend(self, context, location):
|
||||
return self.get_from_backend(context, location)[1]
|
||||
|
||||
|
@ -73,7 +73,7 @@ class TestGlanceAPI(base.IsolatedUnitTest):
|
||||
'checksum': None,
|
||||
'size': 13,
|
||||
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID1),
|
||||
'metadata': {}}],
|
||||
'metadata': {}, 'status': 'active'}],
|
||||
'properties': {'type': 'kernel'}},
|
||||
{'id': UUID2,
|
||||
'name': 'fake image #2',
|
||||
@ -88,7 +88,7 @@ class TestGlanceAPI(base.IsolatedUnitTest):
|
||||
'checksum': 'abc123',
|
||||
'size': 19,
|
||||
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID2),
|
||||
'metadata': {}}],
|
||||
'metadata': {}, 'status': 'active'}],
|
||||
'properties': {}}]
|
||||
self.context = glance.context.RequestContext(is_admin=True)
|
||||
glance.api.v1.images.validate_location = mock.Mock()
|
||||
|
@ -58,7 +58,7 @@ class TestRegistryAPI(base.IsolatedUnitTest, test_utils.RegistryAPIMixIn):
|
||||
return self.get_extra_fixture(
|
||||
id, name,
|
||||
locations=[{'url': "file:///%s/%s" % (self.test_dir, id),
|
||||
'metadata': {}}], **kwargs)
|
||||
'metadata': {}, 'status': 'active'}], **kwargs)
|
||||
|
||||
self.FIXTURES = [
|
||||
_get_extra_fixture(UUID1, 'fake image #1', is_public=False,
|
||||
@ -1763,7 +1763,7 @@ class TestRegistryAPILocations(base.IsolatedUnitTest,
|
||||
return self.get_extra_fixture(
|
||||
id, name,
|
||||
locations=[{'url': "file:///%s/%s" % (self.test_dir, id),
|
||||
'metadata': {}}], **kwargs)
|
||||
'metadata': {}, 'status': 'active'}], **kwargs)
|
||||
|
||||
self.FIXTURES = [
|
||||
_get_extra_fixture(UUID1, 'fake image #1', is_public=False,
|
||||
@ -1789,6 +1789,8 @@ class TestRegistryAPILocations(base.IsolatedUnitTest,
|
||||
self.assertEqual(res.status_int, 200)
|
||||
res_dict = jsonutils.loads(res.body)
|
||||
image = res_dict['image']
|
||||
self.assertIn('id', image['location_data'][0])
|
||||
image['location_data'][0].pop('id')
|
||||
self.assertEqual(self.FIXTURES[0]['locations'][0],
|
||||
image['location_data'][0])
|
||||
self.assertEqual(self.FIXTURES[0]['locations'][0]['url'],
|
||||
@ -1802,6 +1804,8 @@ class TestRegistryAPILocations(base.IsolatedUnitTest,
|
||||
self.assertEqual(res.status_int, 200)
|
||||
res_dict = jsonutils.loads(res.body)
|
||||
image = res_dict['image']
|
||||
self.assertIn('id', image['location_data'][0])
|
||||
image['location_data'][0].pop('id')
|
||||
self.assertEqual(self.FIXTURES[1]['locations'][0],
|
||||
image['location_data'][0])
|
||||
self.assertEqual(self.FIXTURES[1]['locations'][0]['url'],
|
||||
@ -1828,9 +1832,11 @@ class TestRegistryAPILocations(base.IsolatedUnitTest,
|
||||
'size': 19,
|
||||
'location': encrypted_location_url1,
|
||||
'location_data': [{'url': encrypted_location_url1,
|
||||
'metadata': {'key': 'value'}},
|
||||
'metadata': {'key': 'value'},
|
||||
'status': 'active'},
|
||||
{'url': encrypted_location_url2,
|
||||
'metadata': {'key': 'value'}}]}
|
||||
'metadata': {'key': 'value'},
|
||||
'status': 'active'}]}
|
||||
|
||||
self.config(metadata_encryption_key=encryption_key)
|
||||
req = webob.Request.blank('/images')
|
||||
|
@ -677,7 +677,8 @@ class TestRegistryV1Client(base.IsolatedUnitTest, test_utils.RegistryAPIMixIn):
|
||||
location = "file:///tmp/glance-tests/2"
|
||||
loc_meta = {'key': 'value'}
|
||||
fixture = self.get_fixture(location_data=[{'url': location,
|
||||
'metadata': loc_meta}],
|
||||
'metadata': loc_meta,
|
||||
'status': 'active'}],
|
||||
properties={'distro': 'Ubuntu 10.04 LTS'})
|
||||
|
||||
new_image = self.client.add_image(fixture)
|
||||
@ -700,9 +701,11 @@ class TestRegistryV1Client(base.IsolatedUnitTest, test_utils.RegistryAPIMixIn):
|
||||
'container_format': 'ovf',
|
||||
'size': 19,
|
||||
'location_data': [{'url': location % 1,
|
||||
'metadata': loc_meta},
|
||||
'metadata': loc_meta,
|
||||
'status': 'active'},
|
||||
{'url': location % 2,
|
||||
'metadata': {}}],
|
||||
'metadata': {},
|
||||
'status': 'active'}],
|
||||
'properties': {'distro': 'Ubuntu 10.04 LTS'}}
|
||||
|
||||
new_image = self.client.add_image(fixture)
|
||||
|
@ -19,8 +19,8 @@ import webob.exc
|
||||
|
||||
from glance.api.v1 import upload_utils
|
||||
from glance.common import exception
|
||||
from glance.common import store_utils
|
||||
import glance.registry.client.v1.api as registry
|
||||
import glance.store
|
||||
from glance.tests.unit import base
|
||||
import glance.tests.unit.utils as unit_test_utils
|
||||
|
||||
@ -37,11 +37,13 @@ class TestUploadUtils(base.StoreClearingUnitTest):
|
||||
|
||||
def test_initiate_delete(self):
|
||||
req = unit_test_utils.get_fake_request()
|
||||
location = "file://foo/bar"
|
||||
location = {"url": "file://foo/bar",
|
||||
"metadata": {},
|
||||
"status": "active"}
|
||||
id = unit_test_utils.UUID1
|
||||
|
||||
self.mox.StubOutWithMock(glance.store, "safe_delete_from_backend")
|
||||
glance.store.safe_delete_from_backend(req.context, location, id)
|
||||
self.mox.StubOutWithMock(store_utils, "safe_delete_from_backend")
|
||||
store_utils.safe_delete_from_backend(req.context, id, location)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
upload_utils.initiate_deletion(req, location, id)
|
||||
@ -49,18 +51,21 @@ class TestUploadUtils(base.StoreClearingUnitTest):
|
||||
self.mox.VerifyAll()
|
||||
|
||||
def test_initiate_delete_with_delayed_delete(self):
|
||||
self.config(delayed_delete=True)
|
||||
req = unit_test_utils.get_fake_request()
|
||||
location = "file://foo/bar"
|
||||
location = {"url": "file://foo/bar",
|
||||
"metadata": {},
|
||||
"status": "active"}
|
||||
id = unit_test_utils.UUID1
|
||||
|
||||
self.mox.StubOutWithMock(glance.store,
|
||||
self.mox.StubOutWithMock(store_utils,
|
||||
"schedule_delayed_delete_from_backend")
|
||||
glance.store.schedule_delayed_delete_from_backend(req.context,
|
||||
location,
|
||||
id)
|
||||
ret = store_utils.schedule_delayed_delete_from_backend(req.context, id,
|
||||
location)
|
||||
ret.AndReturn(True)
|
||||
self.mox.ReplayAll()
|
||||
|
||||
upload_utils.initiate_deletion(req, location, id, True)
|
||||
upload_utils.initiate_deletion(req, location, id)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
@ -117,12 +122,12 @@ class TestUploadUtils(base.StoreClearingUnitTest):
|
||||
update_data).AndReturn(image_meta.update(update_data))
|
||||
self.mox.ReplayAll()
|
||||
|
||||
actual_meta, actual_loc, loc_meta = upload_utils.upload_data_to_store(
|
||||
actual_meta, location_data = upload_utils.upload_data_to_store(
|
||||
req, image_meta, image_data, store, notifier)
|
||||
|
||||
self.mox.VerifyAll()
|
||||
|
||||
self.assertEqual(actual_loc, location)
|
||||
self.assertEqual(location_data['url'], location)
|
||||
self.assertEqual(actual_meta, image_meta.update(update_data))
|
||||
|
||||
def test_upload_data_to_store_mismatch_size(self):
|
||||
@ -326,8 +331,9 @@ class TestUploadUtils(base.StoreClearingUnitTest):
|
||||
update_data
|
||||
).AndRaise(exception.NotFound)
|
||||
self.mox.StubOutWithMock(upload_utils, "initiate_deletion")
|
||||
upload_utils.initiate_deletion(req, location, image_meta['id'],
|
||||
mox.IsA(bool))
|
||||
upload_utils.initiate_deletion(req, {'url': location,
|
||||
'status': 'active',
|
||||
'metadata': {}}, image_meta['id'])
|
||||
self.mox.StubOutWithMock(upload_utils, "safe_kill")
|
||||
upload_utils.safe_kill(req, image_meta['id'])
|
||||
notifier.error('image.upload', mox.IgnoreArg())
|
||||
|
@ -103,7 +103,9 @@ class TestImagesController(base.StoreClearingUnitTest):
|
||||
|
||||
def test_download(self):
|
||||
request = unit_test_utils.get_fake_request()
|
||||
image = FakeImage('abcd', locations=['http://example.com/image'])
|
||||
image = FakeImage('abcd',
|
||||
locations=[{'url': 'http://example.com/image',
|
||||
'metadata': {}, 'status': 'active'}])
|
||||
self.image_repo.result = image
|
||||
image = self.controller.download(request, unit_test_utils.UUID1)
|
||||
self.assertEqual(image.image_id, 'abcd')
|
||||
|
@ -109,7 +109,7 @@ class TestImageMembersController(test_utils.BaseTestCase):
|
||||
_db_fixture(UUID1, owner=TENANT1, name='1', size=256,
|
||||
is_public=True,
|
||||
locations=[{'url': '%s/%s' % (BASE_URI, UUID1),
|
||||
'metadata': {}}]),
|
||||
'metadata': {}, 'status': 'active'}]),
|
||||
_db_fixture(UUID2, owner=TENANT1, name='2', size=512),
|
||||
_db_fixture(UUID3, owner=TENANT3, name='3', size=512),
|
||||
_db_fixture(UUID4, owner=TENANT4, name='4', size=1024),
|
||||
|
@ -118,12 +118,14 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
self.store = unit_test_utils.FakeStoreAPI()
|
||||
for i in range(1, 4):
|
||||
self.store.data['%s/fake_location_%i' % (BASE_URI, i)] = ('Z', 1)
|
||||
self.store_utils = unit_test_utils.FakeStoreUtils(self.store)
|
||||
self._create_images()
|
||||
self._create_image_members()
|
||||
self.controller = glance.api.v2.images.ImagesController(self.db,
|
||||
self.policy,
|
||||
self.notifier,
|
||||
self.store)
|
||||
self.controller.gateway.store_utils = self.store_utils
|
||||
glance.store.create_stores()
|
||||
|
||||
def _create_images(self):
|
||||
@ -133,7 +135,7 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
name='1', size=256, virtual_size=1024,
|
||||
is_public=True,
|
||||
locations=[{'url': '%s/%s' % (BASE_URI, UUID1),
|
||||
'metadata': {}}],
|
||||
'metadata': {}, 'status': 'active'}],
|
||||
disk_format='raw',
|
||||
container_format='bare',
|
||||
status='active'),
|
||||
@ -1615,11 +1617,11 @@ class TestImagesController(base.IsolatedUnitTest):
|
||||
request, UUID1, changes)
|
||||
|
||||
def test_update_remove_location_store_exception(self):
|
||||
def fake_delete_image_from_backend(self, *args, **kwargs):
|
||||
def fake_delete_image_location_from_backend(self, *args, **kwargs):
|
||||
raise Exception('fake_backend_exception')
|
||||
|
||||
self.stubs.Set(glance.store, 'delete_image_from_backend',
|
||||
fake_delete_image_from_backend)
|
||||
self.stubs.Set(self.store_utils, 'delete_image_location_from_backend',
|
||||
fake_delete_image_location_from_backend)
|
||||
|
||||
request = unit_test_utils.get_fake_request()
|
||||
changes = [{'op': 'remove', 'path': ['locations', '0']}]
|
||||
@ -1784,6 +1786,8 @@ class TestImagesControllerPolicies(base.IsolatedUnitTest):
|
||||
self.policy = unit_test_utils.FakePolicyEnforcer()
|
||||
self.controller = glance.api.v2.images.ImagesController(self.db,
|
||||
self.policy)
|
||||
store = unit_test_utils.FakeStoreAPI()
|
||||
self.store_utils = unit_test_utils.FakeStoreUtils(store)
|
||||
|
||||
def test_index_unauthorized(self):
|
||||
rules = {"get_images": False}
|
||||
@ -1854,7 +1858,7 @@ class TestImagesControllerPolicies(base.IsolatedUnitTest):
|
||||
request, UUID1, changes)
|
||||
|
||||
def test_update_set_image_location_unauthorized(self):
|
||||
def fake_delete_image_from_backend(self, *args, **kwargs):
|
||||
def fake_delete_image_location_from_backend(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
rules = {"set_image_location": False}
|
||||
@ -1866,8 +1870,8 @@ class TestImagesControllerPolicies(base.IsolatedUnitTest):
|
||||
self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
|
||||
request, UUID1, changes)
|
||||
|
||||
self.stubs.Set(glance.store, 'delete_image_from_backend',
|
||||
fake_delete_image_from_backend)
|
||||
self.stubs.Set(self.store_utils, 'delete_image_location_from_backend',
|
||||
fake_delete_image_location_from_backend)
|
||||
|
||||
changes = [{'op': 'replace', 'path': ['locations'], 'value': []}]
|
||||
self.controller.update(request, UUID1, changes)
|
||||
@ -3086,8 +3090,8 @@ class TestImagesSerializerDirectUrl(test_utils.BaseTestCase):
|
||||
UUID1, name='image-1', visibility='public',
|
||||
status='active', size=1024, virtual_size=3072,
|
||||
created_at=DATETIME, updated_at=DATETIME,
|
||||
locations=[{'url': 'http://some/fake/location',
|
||||
'metadata': {}}])
|
||||
locations=[{'id': '1', 'url': 'http://some/fake/location',
|
||||
'metadata': {}, 'status': 'active'}])
|
||||
|
||||
self.queued_image = _domain_fixture(
|
||||
UUID2, name='image-2', status='active',
|
||||
@ -3099,8 +3103,10 @@ class TestImagesSerializerDirectUrl(test_utils.BaseTestCase):
|
||||
self.location_data_image = _domain_fixture(
|
||||
UUID2, name='image-2', status='active',
|
||||
created_at=DATETIME, updated_at=DATETIME,
|
||||
locations=[{'url': self.location_data_image_url,
|
||||
'metadata': self.location_data_image_meta}])
|
||||
locations=[{'id': '2',
|
||||
'url': self.location_data_image_url,
|
||||
'metadata': self.location_data_image_meta,
|
||||
'status': 'active'}])
|
||||
|
||||
def _do_index(self):
|
||||
request = webob.Request.blank('/v2/images')
|
||||
|
@ -71,7 +71,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
|
||||
'min_ram': 0,
|
||||
'size': 13,
|
||||
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID1),
|
||||
'metadata': {}}],
|
||||
'metadata': {}, 'status': 'active'}],
|
||||
'properties': {'type': 'kernel'}},
|
||||
{'id': UUID2,
|
||||
'name': 'fake image #2',
|
||||
@ -88,7 +88,7 @@ class TestRegistryRPC(base.IsolatedUnitTest):
|
||||
'min_ram': 256,
|
||||
'size': 19,
|
||||
'locations': [{'url': "file:///%s/%s" % (self.test_dir, UUID2),
|
||||
'metadata': {}}],
|
||||
'metadata': {}, 'status': 'active'}],
|
||||
'properties': {}}]
|
||||
|
||||
self.context = glance.context.RequestContext(is_admin=True)
|
||||
|