Add multi-store support

Add provision for multi-store support. A new config option
'enabled_backends' takes a comma-separated list of Key:Value pairs,
each mapping a store identifier to its store type.
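
For illustration, a minimal sketch of how the option is consumed; the store
identifiers 'fast' and 'cheap' below are examples, not part of this change:

```python
from oslo_config import cfg

# 'enabled_backends' is a DictOpt, so a glance-api.conf value such as
#     enabled_backends = fast:file,cheap:swift
# is exposed to the code as {'fast': 'file', 'cheap': 'swift'}.
conf = cfg.ConfigOpts()
conf.register_opts([cfg.DictOpt('enabled_backends')])
conf([])
conf.set_override('enabled_backends', {'fast': 'file', 'cheap': 'swift'})

for identifier, store_type in conf.enabled_backends.items():
    print(identifier, store_type)
```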

DocImpact
Depends-On: https://review.openstack.org/573648
Implements: blueprint multi-store

Change-Id: I9cfa066bdce51619a78ce86a8b1f1f8d05e5bfb6
Abhishek Kekane 2018-05-07 10:30:01 +00:00
parent 0b24dbd620
commit cb45edf5c8
22 changed files with 315 additions and 80 deletions

View File

@@ -14,8 +14,10 @@
 # limitations under the License.

 from oslo_config import cfg
+import webob.exc

 from glance.common import wsgi
+from glance.i18n import _

 CONF = cfg.CONF
@@ -34,6 +36,27 @@ class InfoController(object):
'import-methods': import_methods 'import-methods': import_methods
} }
def get_stores(self, req):
# TODO(abhishekk): This will be removed after config options
# 'stores' and 'default_store' are removed.
enabled_backends = CONF.enabled_backends
if not enabled_backends:
msg = _("Multi backend is not supported at this site.")
raise webob.exc.HTTPNotFound(explanation=msg)
backends = []
for backend in enabled_backends:
stores = {}
stores['id'] = backend
description = getattr(CONF, backend).store_description
if description:
stores['description'] = description
if backend == CONF.glance_store.default_backend:
stores['default'] = "true"
backends.append(stores)
return {'stores': backends}
def create_resource(): def create_resource():
return wsgi.Resource(InfoController()) return wsgi.Resource(InfoController())
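
A hedged sketch of querying the new discovery endpoint (registered under
/v2/info/stores in the router change later in this commit); the host, token
and store identifiers are illustrative, and the response shape follows
get_stores() above:

```python
import requests

# Expected response shape, based on InfoController.get_stores():
#   {"stores": [{"id": "fast", "description": "...", "default": "true"},
#               {"id": "cheap"}]}
resp = requests.get('http://glance.example.com/v2/info/stores',
                    headers={'X-Auth-Token': '<token>'})
print(resp.status_code, resp.json())
```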

View File

@@ -100,6 +100,18 @@ class ImageDataController(object):
@utils.mutating @utils.mutating
def upload(self, req, image_id, data, size): def upload(self, req, image_id, data, size):
backend = None
if CONF.enabled_backends:
backend = req.headers.get('x-image-meta-store',
CONF.glance_store.default_backend)
try:
glance_store.get_store_from_store_identifier(backend)
except glance_store.UnknownScheme as exc:
raise webob.exc.HTTPBadRequest(explanation=exc.msg,
request=req,
content_type='text/plain')
image_repo = self.gateway.get_repo(req.context) image_repo = self.gateway.get_repo(req.context)
image = None image = None
refresher = None refresher = None
@@ -129,7 +141,7 @@ class ImageDataController(object):
                         encodeutils.exception_to_unicode(e))
             image_repo.save(image, from_state='queued')

-            image.set_data(data, size)
+            image.set_data(data, size, backend=backend)

             try:
                 image_repo.save(image, from_state='saving')
@@ -274,9 +286,16 @@ class ImageDataController(object):
 # NOTE(jokke): this is horrible way to do it but as long as
 # glance_store is in a shape it is, the only way. Don't hold me
 # accountable for it.
+# TODO(abhishekk): After removal of backend module from glance_store
+# need to change this to use multi_backend module.
 def _build_staging_store():
     conf = cfg.ConfigOpts()
-    backend.register_opts(conf)
+
+    try:
+        backend.register_opts(conf)
+    except cfg.DuplicateOptError:
+        pass
+
     conf.set_override('filesystem_store_datadir',
                       CONF.node_staging_uri[7:],
                       group='glance_store')
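
A sketch of how a client would pick a store for a regular upload using the
new 'x-image-meta-store' header read above; the endpoint is the standard v2
data upload call, and the host, token, image id and store name are
placeholders:

```python
import requests

image_id = '8a7c3f4b-9f07-4f6e-9abc-def012345678'  # placeholder
with open('cirros.qcow2', 'rb') as image_file:
    resp = requests.put(
        'http://glance.example.com/v2/images/%s/file' % image_id,
        headers={'X-Auth-Token': '<token>',
                 'Content-Type': 'application/octet-stream',
                 # omitted -> [glance_store]/default_backend is used
                 'X-Image-Meta-Store': 'fast'},
        data=image_file)
# An unknown store identifier is rejected with 400 per the check above.
print(resp.status_code)
```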

View File

@@ -94,10 +94,6 @@ class ImagesController(object):
         task_factory = self.gateway.get_task_factory(req.context)
         executor_factory = self.gateway.get_task_executor_factory(req.context)
         task_repo = self.gateway.get_task_repo(req.context)
-        task_input = {'image_id': image_id,
-                      'import_req': body}
-
         import_method = body.get('method').get('name')
         uri = body.get('method').get('uri')
@@ -121,11 +117,26 @@ class ImagesController(object):
if not getattr(image, 'disk_format', None): if not getattr(image, 'disk_format', None):
msg = _("'disk_format' needs to be set before import") msg = _("'disk_format' needs to be set before import")
raise exception.Conflict(msg) raise exception.Conflict(msg)
backend = None
if CONF.enabled_backends:
backend = req.headers.get('x-image-meta-store',
CONF.glance_store.default_backend)
try:
glance_store.get_store_from_store_identifier(backend)
except glance_store.UnknownScheme:
msg = _("Store for scheme %s not found") % backend
LOG.warn(msg)
raise exception.Conflict(msg)
except exception.Conflict as e: except exception.Conflict as e:
raise webob.exc.HTTPConflict(explanation=e.msg) raise webob.exc.HTTPConflict(explanation=e.msg)
except exception.NotFound as e: except exception.NotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.msg) raise webob.exc.HTTPNotFound(explanation=e.msg)
task_input = {'image_id': image_id,
'import_req': body,
'backend': backend}
if (import_method == 'web-download' and if (import_method == 'web-download' and
not utils.validate_import_uri(uri)): not utils.validate_import_uri(uri)):
LOG.debug("URI for web-download does not pass filtering: %s", LOG.debug("URI for web-download does not pass filtering: %s",
@@ -324,7 +335,10 @@ class ImagesController(object):
             if image.status == 'uploading':
                 file_path = str(CONF.node_staging_uri + '/' + image.image_id)
-                self.store_api.delete_from_backend(file_path)
+                if CONF.enabled_backends:
+                    self.store_api.delete(file_path, None)
+                else:
+                    self.store_api.delete_from_backend(file_path)

             image.delete()
             image_repo.remove(image)
@@ -926,6 +940,20 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
image_view['file'] = self._get_image_href(image, 'file') image_view['file'] = self._get_image_href(image, 'file')
image_view['schema'] = '/v2/schemas/image' image_view['schema'] = '/v2/schemas/image'
image_view = self.schema.filter(image_view) # domain image_view = self.schema.filter(image_view) # domain
# add store information to image
if CONF.enabled_backends:
locations = _get_image_locations(image)
if locations:
stores = []
for loc in locations:
backend = loc['metadata'].get('backend')
if backend:
stores.append(backend)
if stores:
image_view['stores'] = ",".join(stores)
return image_view return image_view
except exception.Forbidden as e: except exception.Forbidden as e:
raise webob.exc.HTTPForbidden(explanation=e.msg) raise webob.exc.HTTPForbidden(explanation=e.msg)
@@ -941,6 +969,11 @@ class ResponseSerializer(wsgi.JSONResponseSerializer):
','.join(CONF.enabled_import_methods)) ','.join(CONF.enabled_import_methods))
response.headerlist.append(import_methods) response.headerlist.append(import_methods)
if CONF.enabled_backends:
enabled_backends = ("OpenStack-image-store-ids",
','.join(CONF.enabled_backends.keys()))
response.headerlist.append(enabled_backends)
def show(self, response, image): def show(self, response, image):
image_view = self._format_image(image) image_view = self._format_image(image)
body = json.dumps(image_view, ensure_ascii=False) body = json.dumps(image_view, ensure_ascii=False)
@@ -1107,6 +1140,11 @@ def get_base_properties():
'readOnly': True, 'readOnly': True,
'description': _('An image file url'), 'description': _('An image file url'),
}, },
'backend': {
'type': 'string',
'readOnly': True,
'description': _('Backend store to upload image to'),
},
'schema': { 'schema': {
'type': 'string', 'type': 'string',
'readOnly': True, 'readOnly': True,
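
A sketch of targeting a store during interoperable image import; the header
name and the 'web-download' method come from the controller change above,
while the host, token, image id and store identifier are placeholders:

```python
import requests

image_id = '8a7c3f4b-9f07-4f6e-9abc-def012345678'  # placeholder
resp = requests.post(
    'http://glance.example.com/v2/images/%s/import' % image_id,
    headers={'X-Auth-Token': '<token>',
             'X-Image-Meta-Store': 'cheap'},
    json={'method': {'name': 'web-download',
                     'uri': 'https://download.example.com/cirros.qcow2'}})
# A store that is not configured results in 409 Conflict per the check above.
print(resp.status_code)
```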

View File

@@ -565,5 +565,14 @@ class API(wsgi.Router):
controller=reject_method_resource, controller=reject_method_resource,
action='reject', action='reject',
allowed_methods='GET') allowed_methods='GET')
mapper.connect('/info/stores',
controller=info_resource,
action='get_stores',
conditions={'method': ['GET']},
body_reject=True)
mapper.connect('/info/stores',
controller=reject_method_resource,
action='reject',
allowed_methods='GET')
super(API, self).__init__(mapper) super(API, self).__init__(mapper)

View File

@@ -61,8 +61,14 @@ class _WebDownload(task.Task):
         # glance_store refactor is done. A good thing is that glance_store is
         # under our team's management and it gates on Glance so changes to
         # this API will (should?) break task's tests.
+        # TODO(abhishekk): After removal of backend module from glance_store
+        # need to change this to use multi_backend module.
         conf = cfg.ConfigOpts()
-        backend.register_opts(conf)
+        try:
+            backend.register_opts(conf)
+        except cfg.DuplicateOptError:
+            pass
         conf.set_override('filesystem_store_datadir',
                           CONF.node_staging_uri[7:],
                           group='glance_store')

View File

@@ -86,7 +86,10 @@ class _DeleteFromFS(task.Task):
         :param file_path: path to the file being deleted
         """
-        store_api.delete_from_backend(file_path)
+        if CONF.enabled_backends:
+            store_api.delete(file_path, None)
+        else:
+            store_api.delete_from_backend(file_path)
class _VerifyStaging(task.Task): class _VerifyStaging(task.Task):
@@ -122,6 +125,8 @@ class _VerifyStaging(task.Task):
self._build_store() self._build_store()
def _build_store(self): def _build_store(self):
# TODO(abhishekk): After removal of backend module from glance_store
# need to change this to use multi_backend module.
# NOTE(jokke): If we want to use some other store for staging, we can # NOTE(jokke): If we want to use some other store for staging, we can
# implement the logic more general here. For now this should do. # implement the logic more general here. For now this should do.
# NOTE(flaper87): Due to the nice glance_store api (#sarcasm), we're # NOTE(flaper87): Due to the nice glance_store api (#sarcasm), we're
@@ -133,7 +138,10 @@ class _VerifyStaging(task.Task):
         # under our team's management and it gates on Glance so changes to
         # this API will (should?) break task's tests.
         conf = cfg.ConfigOpts()
-        backend.register_opts(conf)
+        try:
+            backend.register_opts(conf)
+        except cfg.DuplicateOptError:
+            pass
         conf.set_override('filesystem_store_datadir',
                           CONF.node_staging_uri[7:],
                           group='glance_store')
@@ -159,12 +167,13 @@ class _VerifyStaging(task.Task):

 class _ImportToStore(task.Task):

-    def __init__(self, task_id, task_type, image_repo, uri, image_id):
+    def __init__(self, task_id, task_type, image_repo, uri, image_id, backend):
         self.task_id = task_id
         self.task_type = task_type
         self.image_repo = image_repo
         self.uri = uri
         self.image_id = image_id
+        self.backend = backend
         super(_ImportToStore, self).__init__(
             name='%s-ImportToStore-%s' % (task_type, task_id))
@@ -215,7 +224,8 @@ class _ImportToStore(task.Task):
         # will need the file path anyways for our delete workflow for now.
         # For future proofing keeping this as is.
         image = self.image_repo.get(self.image_id)
-        image_import.set_image_data(image, file_path or self.uri, self.task_id)
+        image_import.set_image_data(image, file_path or self.uri, self.task_id,
+                                    backend=self.backend)

         # NOTE(flaper87): We need to save the image again after the locations
         # have been set in the image.
@@ -306,6 +316,7 @@ def get_flow(**kwargs):
     image_id = kwargs.get('image_id')
     import_method = kwargs.get('import_req')['method']['name']
     uri = kwargs.get('import_req')['method'].get('uri')
+    backend = kwargs.get('backend')

     separator = ''
     if not CONF.node_staging_uri.endswith('/'):
@@ -332,7 +343,8 @@ def get_flow(**kwargs):
                                      task_type,
                                      image_repo,
                                      file_uri,
-                                     image_id)
+                                     image_id,
+                                     backend)
     flow.add(import_to_store)

     delete_task = lf.Flow(task_type).add(_DeleteFromFS(task_id, task_type))

View File

@@ -129,6 +129,7 @@ class TaskExecutor(glance.async.TaskExecutor):
if task.type == 'api_image_import': if task.type == 'api_image_import':
kwds['image_id'] = task_input['image_id'] kwds['image_id'] = task_input['image_id']
kwds['import_req'] = task_input['import_req'] kwds['import_req'] = task_input['import_req']
kwds['backend'] = task_input['backend']
return driver.DriverManager('glance.flows', task.type, return driver.DriverManager('glance.flows', task.type,
invoke_on_load=True, invoke_on_load=True,
invoke_kwds=kwds).driver invoke_kwds=kwds).driver
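
For reference, a sketch of the task_input the controller builds and the
executor forwards to the flow as shown above; values are illustrative and
'fast' is an assumed store identifier:

```python
task_input = {
    'image_id': '8a7c3f4b-9f07-4f6e-9abc-def012345678',
    'import_req': {'method': {'name': 'web-download',
                              'uri': 'https://download.example.com/x.qcow2'}},
    'backend': 'fast',
}
# mirrors the copy into the flow kwargs performed by the executor
kwds = {'image_id': task_input['image_id'],
        'import_req': task_input['import_req'],
        'backend': task_input['backend']}
```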

View File

@@ -137,13 +137,13 @@ def create_image(image_repo, image_factory, image_properties, task_id):
     return image

-def set_image_data(image, uri, task_id):
+def set_image_data(image, uri, task_id, backend=None):
     data_iter = None
     try:
         LOG.info(_LI("Task %(task_id)s: Got image data uri %(data_uri)s to be "
                      "imported"), {"data_uri": uri, "task_id": task_id})
         data_iter = script_utils.get_image_data_iter(uri)
-        image.set_data(data_iter)
+        image.set_data(data_iter, backend=backend)
     except Exception as e:
         with excutils.save_and_reraise_exception():
             LOG.warn(_LW("Task %(task_id)s failed with exception %(error)s") %

View File

@@ -46,7 +46,15 @@ def safe_delete_from_backend(context, image_id, location):
     """
     try:
-        ret = store_api.delete_from_backend(location['url'], context=context)
+        if CONF.enabled_backends:
+            backend = location['metadata'].get('backend')
+            ret = store_api.delete(location['url'],
+                                   backend,
+                                   context=context)
+        else:
+            ret = store_api.delete_from_backend(location['url'],
+                                                context=context)
+
         location['status'] = 'deleted'
         if 'id' in location:
             db_api.get_api().image_location_delete(context, image_id,
@@ -133,5 +141,9 @@ def validate_external_location(uri):
     # TODO(zhiyan): This function could be moved to glance_store.
     # TODO(gm): Use a whitelist of allowed schemes
     scheme = urlparse.urlparse(uri).scheme
-    return (scheme in store_api.get_known_schemes() and
+    known_schemes = store_api.get_known_schemes()
+    if CONF.enabled_backends:
+        known_schemes = store_api.get_known_schemes_for_multi_store()
+
+    return (scheme in known_schemes and
             scheme not in RESTRICTED_URI_SCHEMAS)

View File

@@ -317,6 +317,13 @@ wsgi_opts = [
'"HTTP_X_FORWARDED_PROTO".')), '"HTTP_X_FORWARDED_PROTO".')),
] ]
store_opts = [
    cfg.DictOpt('enabled_backends',
                help=_('Key:Value pair of store identifier and store type. '
                       'In case of multiple backends they should be '
                       'separated using comma.')),
]
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@@ -325,6 +332,7 @@ CONF.register_opts(bind_opts)
CONF.register_opts(socket_opts) CONF.register_opts(socket_opts)
CONF.register_opts(eventlet_opts) CONF.register_opts(eventlet_opts)
CONF.register_opts(wsgi_opts) CONF.register_opts(wsgi_opts)
CONF.register_opts(store_opts)
profiler_opts.set_defaults(CONF) profiler_opts.set_defaults(CONF)
ASYNC_EVENTLET_THREAD_POOL_LIST = [] ASYNC_EVENTLET_THREAD_POOL_LIST = []
@@ -448,6 +456,13 @@ def initialize_glance_store():
glance_store.verify_default_store() glance_store.verify_default_store()
def initialize_multi_store():
"""Initialize glance multi store backends."""
glance_store.register_store_opts(CONF)
glance_store.create_multi_stores(CONF)
glance_store.verify_store()
def get_asynchronous_eventlet_pool(size=1000): def get_asynchronous_eventlet_pool(size=1000):
"""Return eventlet pool to caller. """Return eventlet pool to caller.
@@ -599,7 +614,10 @@ class Server(object):
             self.client_socket_timeout = CONF.client_socket_timeout or None
             self.configure_socket(old_conf, has_changed)
         if self.initialize_glance_store:
-            initialize_glance_store()
+            if CONF.enabled_backends:
+                initialize_multi_store()
+            else:
+                initialize_glance_store()

     def reload(self):
         """

View File

@@ -22,6 +22,7 @@ from glance import notifier
CONF = cfg.CONF CONF = cfg.CONF
CONF.import_group("profiler", "glance.common.wsgi") CONF.import_group("profiler", "glance.common.wsgi")
CONF.import_opt("enabled_backends", "glance.common.wsgi")
logging.register_options(CONF) logging.register_options(CONF)
CONFIG_FILES = ['glance-api-paste.ini', CONFIG_FILES = ['glance-api-paste.ini',
@@ -60,8 +61,15 @@ def init_app():
     config_files = _get_config_files()
     CONF([], project='glance', default_config_files=config_files)
     logging.setup(CONF, "glance")
-    glance_store.register_opts(CONF)
-    glance_store.create_stores(CONF)
-    glance_store.verify_default_store()
+
+    if CONF.enabled_backends:
+        glance_store.register_store_opts(CONF)
+        glance_store.create_multi_stores(CONF)
+        glance_store.verify_store()
+    else:
+        glance_store.register_opts(CONF)
+        glance_store.create_stores(CONF)
+        glance_store.verify_default_store()

     _setup_os_profiler()
     return config.load_paste_app('glance-api')

View File

@@ -283,7 +283,7 @@ class Image(object):
     def get_data(self, *args, **kwargs):
         raise NotImplementedError()

-    def set_data(self, data, size=None):
+    def set_data(self, data, size=None, backend=None):
         raise NotImplementedError()

View File

@@ -194,8 +194,8 @@ class Image(object):
     def reactivate(self):
         self.base.reactivate()

-    def set_data(self, data, size=None):
-        self.base.set_data(data, size)
+    def set_data(self, data, size=None, backend=None):
+        self.base.set_data(data, size, backend=backend)

     def get_data(self, *args, **kwargs):
         return self.base.get_data(*args, **kwargs)

View File

@@ -59,9 +59,16 @@ class ImageRepoProxy(glance.domain.proxy.Repo):
                                                    self.store_api)
         member_ids = [m.member_id for m in member_repo.list()]
         for location in image.locations:
-            self.store_api.set_acls(location['url'], public=public,
-                                    read_tenants=member_ids,
-                                    context=self.context)
+            if CONF.enabled_backends:
+                self.store_api.set_acls_for_multi_store(
+                    location['url'], location['metadata']['backend'],
+                    public=public, read_tenants=member_ids,
+                    context=self.context
+                )
+            else:
+                self.store_api.set_acls(location['url'], public=public,
+                                        read_tenants=member_ids,
+                                        context=self.context)

     def add(self, image):
         result = super(ImageRepoProxy, self).add(image)
@@ -82,19 +89,28 @@ def _get_member_repo_for_store(image, context, db_api, store_api):
     return store_image_repo

-def _check_location_uri(context, store_api, store_utils, uri):
+def _check_location_uri(context, store_api, store_utils, uri,
+                        backend=None):
     """Check if an image location is valid.

     :param context: Glance request context
     :param store_api: store API module
     :param store_utils: store utils module
     :param uri: location's uri string
+    :param backend: A backend name for the store
     """
     try:
         # NOTE(zhiyan): Some stores return zero when it catch exception
+        if CONF.enabled_backends:
+            size_from_backend = store_api.get_size_from_uri_and_backend(
+                uri, backend, context=context)
+        else:
+            size_from_backend = store_api.get_size_from_backend(
+                uri, context=context)
+
         is_ok = (store_utils.validate_external_location(uri) and
-                 store_api.get_size_from_backend(uri, context=context) > 0)
+                 size_from_backend > 0)
     except (store.UnknownScheme, store.NotFound, store.BadStoreUri):
         is_ok = False
     if not is_ok:
@@ -103,15 +119,25 @@ def _check_location_uri(context, store_api, store_utils, uri):

 def _check_image_location(context, store_api, store_utils, location):
-    _check_location_uri(context, store_api, store_utils, location['url'])
+    backend = None
+    if CONF.enabled_backends:
+        backend = location['metadata'].get('backend')
+
+    _check_location_uri(context, store_api, store_utils, location['url'],
+                        backend=backend)
     store_api.check_location_metadata(location['metadata'])

 def _set_image_size(context, image, locations):
     if not image.size:
         for location in locations:
-            size_from_backend = store.get_size_from_backend(
-                location['url'], context=context)
+            if CONF.enabled_backends:
+                size_from_backend = store.get_size_from_uri_and_backend(
+                    location['url'], location['metadata'].get('backend'),
+                    context=context)
+            else:
+                size_from_backend = store.get_size_from_backend(
+                    location['url'], context=context)
             if size_from_backend:
                 # NOTE(flwang): This assumes all locations have the same size
@@ -404,7 +430,7 @@ class ImageProxy(glance.domain.proxy.Image):
                                               self.image.image_id,
                                               location)

-    def set_data(self, data, size=None):
+    def set_data(self, data, size=None, backend=None):
         if size is None:
             size = 0  # NOTE(markwash): zero -> unknown size
@@ -429,20 +455,32 @@ class ImageProxy(glance.domain.proxy.Image):
             verifier = None

         hashing_algo = CONF['hashing_algorithm']
-        (location,
-         size,
-         checksum,
-         multihash,
-         loc_meta) = self.store_api.add_to_backend_with_multihash(
-            CONF,
-            self.image.image_id,
-            utils.LimitingReader(utils.CooperativeReader(data),
-                                 CONF.image_size_cap),
-            size,
-            hashing_algo,
-            context=self.context,
-            verifier=verifier)
+        if CONF.enabled_backends:
+            (location, size, checksum,
+             multihash, loc_meta) = self.store_api.add_with_multihash(
+                CONF,
+                self.image.image_id,
+                utils.LimitingReader(utils.CooperativeReader(data),
+                                     CONF.image_size_cap),
+                size,
+                backend,
+                hashing_algo,
+                context=self.context,
+                verifier=verifier)
+        else:
+            (location,
+             size,
+             checksum,
+             multihash,
+             loc_meta) = self.store_api.add_to_backend_with_multihash(
+                CONF,
+                self.image.image_id,
+                utils.LimitingReader(utils.CooperativeReader(data),
+                                     CONF.image_size_cap),
+                size,
+                hashing_algo,
+                context=self.context,
+                verifier=verifier)

         # NOTE(bpoulos): if verification fails, exception will be raised
         if verifier:
@@ -451,8 +489,12 @@ class ImageProxy(glance.domain.proxy.Image):
                 LOG.info(_LI("Successfully verified signature for image %s"),
                          self.image.image_id)
             except crypto_exception.InvalidSignature:
-                self.store_api.delete_from_backend(location,
-                                                   context=self.context)
+                if CONF.enabled_backends:
+                    self.store_api.delete(location, loc_meta.get('backend'),
+                                          context=self.context)
+                else:
+                    self.store_api.delete_from_backend(location,
+                                                       context=self.context)
                 raise cursive_exception.SignatureVerificationError(
                     _('Signature verification failed')
                 )
@@ -476,11 +518,18 @@ class ImageProxy(glance.domain.proxy.Image):
         err = None
         for loc in self.image.locations:
             try:
-                data, size = self.store_api.get_from_backend(
-                    loc['url'],
-                    offset=offset,
-                    chunk_size=chunk_size,
-                    context=self.context)
+                backend = loc['metadata'].get('backend')
+                if CONF.enabled_backends:
+                    data, size = self.store_api.get(
+                        loc['url'], backend, offset=offset,
+                        chunk_size=chunk_size, context=self.context
+                    )
+                else:
+                    data, size = self.store_api.get_from_backend(
+                        loc['url'],
+                        offset=offset,
+                        chunk_size=chunk_size,
+                        context=self.context)

                 return data
             except Exception as e:
@@ -490,8 +539,9 @@ class ImageProxy(glance.domain.proxy.Image):
                           'err': encodeutils.exception_to_unicode(e)})
                 err = e
         # tried all locations
-        LOG.error(_LE('Glance tried all active locations to get data for '
-                      'image %s but all have failed.') % self.image.image_id)
+        LOG.error(_LE(
+            'Glance tried all active locations/stores to get data '
+            'for image %s but all have failed.') % self.image.image_id)
         raise err
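
A sketch of what an image location looks like once data lands in a named
store; the reads and deletes above resolve the store from this metadata
(URL and identifier are illustrative):

```python
location = {
    'url': 'rbd://5de6c8a1-397c-4c86/images/8a7c3f4b/snap',
    'metadata': {'backend': 'fast'},   # store identifier recorded with the location
    'status': 'active',
}
store_identifier = location['metadata'].get('backend')  # -> 'fast'
```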

View File

@@ -315,11 +315,16 @@ class NotificationBase(object):
     def get_payload(self, obj):
         return {}

-    def send_notification(self, notification_id, obj, extra_payload=None):
+    def send_notification(self, notification_id, obj, extra_payload=None,
+                          backend=None):
         payload = self.get_payload(obj)
         if extra_payload is not None:
             payload.update(extra_payload)
+        # update backend information in the notification
+        if backend:
+            payload["backend"] = backend
+
         _send_notification(self.notifier.info, notification_id, payload)

@@ -419,12 +424,12 @@ class ImageProxy(NotificationProxy, domain_proxy.Image):
         data = self.repo.get_data(offset=offset, chunk_size=chunk_size)
         return self._get_chunk_data_iterator(data, chunk_size=chunk_size)

-    def set_data(self, data, size=None):
-        self.send_notification('image.prepare', self.repo)
+    def set_data(self, data, size=None, backend=None):
+        self.send_notification('image.prepare', self.repo, backend=backend)

         notify_error = self.notifier.error
         try:
-            self.repo.set_data(data, size)
+            self.repo.set_data(data, size, backend=backend)
         except glance_store.StorageFull as e:
             msg = (_("Image storage media is full: %s") %
                    encodeutils.exception_to_unicode(e))
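
A minimal sketch of the effect on notification payloads (payload trimmed;
'fast' is an assumed store identifier):

```python
payload = {'id': '8a7c3f4b-9f07-4f6e-9abc-def012345678', 'status': 'queued'}
backend = 'fast'
if backend:
    # mirrors send_notification() above: the target store is advertised
    # in 'image.prepare' and related notifications
    payload['backend'] = backend
```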

View File

@@ -57,8 +57,13 @@ def _calc_required_size(context, image, locations):
         size_from_backend = None

         try:
-            size_from_backend = store.get_size_from_backend(
-                location['url'], context=context)
+            if CONF.enabled_backends:
+                size_from_backend = store.get_size_from_uri_and_backend(
+                    location['url'], location['metadata'].get('backend'),
+                    context=context)
+            else:
+                size_from_backend = store.get_size_from_backend(
+                    location['url'], context=context)
         except (store.UnknownScheme, store.NotFound):
             pass
         except store.BadStoreUri:
@@ -293,7 +298,7 @@ class ImageProxy(glance.domain.proxy.Image):
         super(ImageProxy, self).__init__(image)
         self.orig_props = set(image.extra_properties.keys())

-    def set_data(self, data, size=None):
+    def set_data(self, data, size=None, backend=None):
         remaining = glance.api.common.check_quota(
             self.context, size, self.db_api, image_id=self.image.image_id)
         if remaining is not None:
@@ -302,7 +307,7 @@ class ImageProxy(glance.domain.proxy.Image):
             data = utils.LimitingReader(
                 data, remaining, exception_class=exception.StorageQuotaFull)

-        self.image.set_data(data, size=size)
+        self.image.set_data(data, size=size, backend=backend)

         # NOTE(jbresnah) If two uploads happen at the same time and neither
         # properly sets the size attribute[1] then there is a race condition

View File

@@ -253,7 +253,13 @@ class ScrubDBQueue(object):
                 else:
                     uri = loc['url']

-                ret.append((image['id'], loc['id'], uri))
+                # if multi-store is enabled then we need to pass backend
+                # to delete the image.
+                backend = loc['metadata'].get('backend')
+                if CONF.enabled_backends:
+                    ret.append((image['id'], loc['id'], uri, backend))
+                else:
+                    ret.append((image['id'], loc['id'], uri))
         return ret

     def has_image(self, image_id):
@@ -327,10 +333,18 @@ class Scrubber(object):
             raise exception.FailedToGetScrubberJobs()

         delete_jobs = {}
-        for image_id, loc_id, loc_uri in records:
-            if image_id not in delete_jobs:
-                delete_jobs[image_id] = []
-            delete_jobs[image_id].append((image_id, loc_id, loc_uri))
+        if CONF.enabled_backends:
+            for image_id, loc_id, loc_uri, backend in records:
+                if image_id not in delete_jobs:
+                    delete_jobs[image_id] = []
+                delete_jobs[image_id].append((image_id, loc_id,
+                                              loc_uri, backend))
+        else:
+            for image_id, loc_id, loc_uri in records:
+                if image_id not in delete_jobs:
+                    delete_jobs[image_id] = []
+                delete_jobs[image_id].append((image_id, loc_id, loc_uri))
         return delete_jobs
def run(self, event=None): def run(self, event=None):
@@ -347,11 +361,21 @@ class Scrubber(object):
                       {'id': image_id, 'count': len(delete_jobs)})

             success = True
-            for img_id, loc_id, uri in delete_jobs:
-                try:
-                    self._delete_image_location_from_backend(img_id, loc_id, uri)
-                except Exception:
-                    success = False
+            if CONF.enabled_backends:
+                for img_id, loc_id, uri, backend in delete_jobs:
+                    try:
+                        self._delete_image_location_from_backend(img_id, loc_id,
+                                                                 uri,
+                                                                 backend=backend)
+                    except Exception:
+                        success = False
+            else:
+                for img_id, loc_id, uri in delete_jobs:
+                    try:
+                        self._delete_image_location_from_backend(img_id, loc_id,
+                                                                 uri)
+                    except Exception:
+                        success = False

             if success:
                 image = db_api.get_api().image_get(self.admin_context, image_id)
@@ -364,11 +388,15 @@ class Scrubber(object):
                           "from backend. Leaving image '%s' in 'pending_delete'"
                           " status") % image_id)

-    def _delete_image_location_from_backend(self, image_id, loc_id, uri):
+    def _delete_image_location_from_backend(self, image_id, loc_id, uri,
+                                            backend=None):
         try:
             LOG.debug("Scrubbing image %s from a location.", image_id)
             try:
-                self.store_api.delete_from_backend(uri, self.admin_context)
+                if CONF.enabled_backends:
+                    self.store_api.delete(uri, backend, self.admin_context)
+                else:
+                    self.store_api.delete_from_backend(uri, self.admin_context)
             except store_exceptions.NotFound:
                 LOG.info(_LI("Image location for image '%s' not found in "
                              "backend; Marking image location deleted in "

View File

@@ -58,6 +58,7 @@ class TestSchemas(functional.FunctionalTest):
'min_disk', 'min_disk',
'protected', 'protected',
'os_hidden', 'os_hidden',
'backend'
]) ])
self.assertEqual(expected, set(image_schema['properties'].keys())) self.assertEqual(expected, set(image_schema['properties'].keys()))

View File

@@ -44,7 +44,7 @@ class ImageStub(glance.domain.Image):
     def get_data(self, offset=0, chunk_size=None):
         return ['01234', '56789']

-    def set_data(self, data, size=None):
+    def set_data(self, data, size, backend=None):
         for chunk in data:
             pass

View File

@@ -42,7 +42,7 @@ class FakeImage(object):
     locations = [{'url': 'file:///not/a/path', 'metadata': {}}]
     tags = set([])

-    def set_data(self, data, size=None):
+    def set_data(self, data, size=None, backend=None):
         self.size = 0
         for d in data:
             self.size += len(d)

View File

@@ -69,7 +69,7 @@ class FakeImage(object):
             return self.data[offset:offset + chunk_size]
         return self.data[offset:]

-    def set_data(self, data, size=None):
+    def set_data(self, data, size=None, backend=None):
         self.data = ''.join(data)
         self.size = size
         self.status = 'modified-by-fake'

View File

@@ -34,7 +34,7 @@ class TestSchemasController(test_utils.BaseTestCase):
             'file', 'container_format', 'schema', 'id', 'size',
             'direct_url', 'min_ram', 'min_disk', 'protected',
             'locations', 'owner', 'virtual_size', 'os_hidden',
-            'os_hash_algo', 'os_hash_value'])
+            'os_hash_algo', 'os_hash_value', 'backend'])
         self.assertEqual(expected, set(output['properties'].keys()))

     def test_image_has_correct_statuses(self):
def test_image_has_correct_statuses(self): def test_image_has_correct_statuses(self):