Add capabilities to storage driver

Storage capabilities are used to indicate the static and dynamic
abilities of a storage driver object, based on the current driver
implementation, the particular driver configuration, and the backend
status.

Using storage capabilities, glance_store can operate on the backend
more appropriately to support upper-layer requests, for example
enabling or disabling the add() function exposed to Glance, or
deciding whether Glance may reuse a driver instance for all requests
depending on whether the driver and/or backend is stateless.

This patch implements initial capabilities for the existing drivers
and changes the foundational code to be capability-aware. Mainly it
contains:

1. Implemented the essential code to enable driver capabilities and
added the necessary capabilities to each driver (a minimal
illustrative sketch follows this list).

2. Added a generic checker on the relevant storage operations to make
sure the driver's capabilities can handle the requested operation.
The check logic can easily be enhanced as needed in the future.

3. Added callback-based scheduling logic to update a store's dynamic
capabilities when the operator enables it via a configuration option.

4. Refactored the existing disablement logic for the driver add()
interface to handle it consistently via capabilities; removed
add_disabled().

5. Removed the now-redundant exception conversion logic for the other
interfaces, since the proper exception can be raised directly from
the checker.

6. Added logic to recreate the driver object if the store and/or
driver isn't stateless.
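
For illustration only, a minimal sketch of how a driver declares static
capabilities and guards its operations under this scheme. The FakeStore
driver and its return values are hypothetical; the capabilities module,
the constants and the check decorator are the ones introduced by this
patch.

from glance_store import capabilities
import glance_store.driver


class FakeStore(glance_store.driver.Store):
    """Hypothetical driver, shown only to illustrate the capability scheme."""

    # Static capabilities declared by the driver implementation; combined
    # with any dynamic capabilities set later via set_capabilities().
    _CAPABILITIES = (capabilities.READ_RANDOM |
                     capabilities.WRITE_ACCESS |
                     capabilities.DRIVER_REUSABLE)

    @capabilities.check
    def get(self, location, offset=0, chunk_size=None, context=None):
        # Only reached when READ_ACCESS (plus READ_OFFSET/READ_CHUNK if
        # offset/chunk_size were requested) is available; otherwise the
        # checker raises StoreGetNotSupported/StoreRandomGetNotSupported.
        return iter([]), 0

    @capabilities.check
    def add(self, image_id, image_file, image_size, context=None):
        # Raises StoreAddDisabled when WRITE_ACCESS has been unset, e.g.
        # after a BadStoreConfiguration was caught in configure().
        return 'fake://%s' % image_id, image_size, None, {}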

A few minor changes are needed on the Glance side:
Change Ibbc85b6bc2ea98c564d316db2874d7df5aac32a6.

DocImpact
Implements: blueprint store-capabilities

Change-Id: Iedf0d4f829e46ca64c3f4fc6a7dfee54d9b0605b
Signed-off-by: Zhi Yan Liu <zhiyanl@cn.ibm.com>
Zhi Yan Liu 2014-11-21 22:05:49 +08:00
parent 36e293b5d5
commit 138875b7c3
25 changed files with 551 additions and 61 deletions

View File

@ -19,6 +19,7 @@ from cinderclient import service_catalog
from cinderclient.v2 import client as cinderclient
from oslo_config import cfg
from glance_store import capabilities
from glance_store.common import utils
import glance_store.driver
from glance_store import exceptions
@ -130,6 +131,7 @@ class Store(glance_store.driver.Store):
"""Cinder backend store adapter."""
_CAPABILITIES = capabilities.DRIVER_REUSABLE
OPTIONS = _CINDER_OPTS
EXAMPLE_URL = "cinder://<VOLUME_ID>"

View File

@ -32,6 +32,7 @@ from oslo_utils import excutils
from oslo_utils import units
import glance_store
from glance_store import capabilities
from glance_store.common import utils
import glance_store.driver
from glance_store import exceptions
@ -148,6 +149,9 @@ class ChunkedFile(object):
class Store(glance_store.driver.Store):
_CAPABILITIES = (capabilities.READ_RANDOM |
capabilities.WRITE_ACCESS |
capabilities.DRIVER_REUSABLE)
OPTIONS = _FILESYSTEM_CONFIGS
READ_CHUNKSIZE = 64 * units.Ki
WRITE_CHUNKSIZE = READ_CHUNKSIZE
@ -431,6 +435,7 @@ class Store(glance_store.driver.Store):
return {}
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -465,6 +470,7 @@ class Store(glance_store.driver.Store):
LOG.debug(msg)
return filesize
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -535,6 +541,7 @@ class Store(glance_store.driver.Store):
return best_datadir
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend

View File

@ -22,6 +22,7 @@ import urlparse
from oslo_config import cfg
from oslo_utils import excutils
from glance_store import capabilities
import glance_store.driver
from glance_store import exceptions
from glance_store.i18n import _
@ -80,6 +81,7 @@ class StoreLocation(glance_store.location.StoreLocation):
class Store(glance_store.driver.Store):
"""GridFS adapter"""
_CAPABILITIES = capabilities.RW_ACCESS
OPTIONS = _GRIDFS_OPTS
EXAMPLE_URL = "gridfs://<IMAGE_ID>"
@ -117,6 +119,7 @@ class Store(glance_store.driver.Store):
reason=reason)
return result
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -159,6 +162,7 @@ class Store(glance_store.driver.Store):
LOG.debug(msg)
raise exceptions.NotFound(msg)
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend
@ -199,6 +203,7 @@ class Store(glance_store.driver.Store):
return (loc.get_uri(), image.length, image.md5, {})
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -18,6 +18,7 @@ import logging
import socket
import urlparse
from glance_store import capabilities
import glance_store.driver
from glance_store import exceptions
from glance_store.i18n import _
@ -111,6 +112,10 @@ class Store(glance_store.driver.Store):
"""An implementation of the HTTP(S) Backend Adapter"""
_CAPABILITIES = (capabilities.READ_ACCESS |
capabilities.DRIVER_REUSABLE)
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -25,6 +25,7 @@ import urllib
from oslo_config import cfg
from glance_store import capabilities
from glance_store.common import utils
from glance_store import driver
from glance_store import exceptions
@ -175,6 +176,7 @@ class ImageIterator(object):
class Store(driver.Store):
"""An implementation of the RBD backend adapter."""
_CAPABILITIES = capabilities.RW_ACCESS
OPTIONS = _RBD_OPTS
EXAMPLE_URL = "rbd://<FSID>/<POOL>/<IMAGE>/<SNAP>"
@ -206,6 +208,7 @@ class Store(driver.Store):
raise exceptions.BadStoreConfiguration(store_name='rbd',
reason=reason)
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -313,6 +316,7 @@ class Store(driver.Store):
LOG.debug(log_msg % image_name)
raise exceptions.InUseByStore()
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend
@ -390,6 +394,7 @@ class Store(driver.Store):
return (loc.get_uri(), image_size, checksum.hexdigest(), {})
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -31,6 +31,7 @@ from oslo_utils import units
import six
import glance_store
from glance_store import capabilities
from glance_store.common import utils
import glance_store.driver
from glance_store import exceptions
@ -293,6 +294,7 @@ class ChunkedFile(object):
class Store(glance_store.driver.Store):
"""An implementation of the s3 adapter."""
_CAPABILITIES = capabilities.RW_ACCESS
OPTIONS = _S3_OPTS
EXAMPLE_URL = "s3://<ACCESS_KEY>:<SECRET_KEY>@<S3_URL>/<BUCKET>/<OBJ>"
@ -364,6 +366,7 @@ class Store(glance_store.driver.Store):
reason=reason)
return result
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -426,6 +429,7 @@ class Store(glance_store.driver.Store):
return key
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend
@ -651,6 +655,7 @@ class Store(glance_store.driver.Store):
"key=%(obj_name)s") % {'obj_name': obj_name})
raise glance_store.BackendException(msg)
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -24,6 +24,7 @@ from oslo_utils import excutils
from oslo_utils import units
import glance_store
from glance_store import capabilities
import glance_store.driver
from glance_store import exceptions
from glance_store.i18n import _
@ -173,6 +174,7 @@ class ImageIterator(object):
class Store(glance_store.driver.Store):
"""Sheepdog backend adapter."""
_CAPABILITIES = (capabilities.RW_ACCESS | capabilities.DRIVER_REUSABLE)
OPTIONS = _SHEEPDOG_OPTS
EXAMPLE_URL = "sheepdog://image"
@ -209,6 +211,7 @@ class Store(glance_store.driver.Store):
raise exceptions.BadStoreConfiguration(store_name='sheepdog',
reason=reason)
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -247,6 +250,7 @@ class Store(glance_store.driver.Store):
% image.name)
return image.get_size()
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend
@ -289,6 +293,7 @@ class Store(glance_store.driver.Store):
return (location.get_uri(), image_size, checksum.hexdigest(), {})
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -28,6 +28,7 @@ import urllib
import glance_store
from glance_store._drivers.swift import utils as sutils
from glance_store import capabilities
from glance_store.common import auth
from glance_store.common import utils as cutils
from glance_store import driver
@ -374,6 +375,7 @@ Store.OPTIONS = _SWIFT_OPTS + sutils.swift_opts
class BaseStore(driver.Store):
_CAPABILITIES = capabilities.RW_ACCESS
CHUNKSIZE = 65536
OPTIONS = _SWIFT_OPTS + sutils.swift_opts
@ -418,6 +420,7 @@ class BaseStore(driver.Store):
return (resp_headers, resp_body)
@capabilities.check
def get(self, location, connection=None,
offset=0, chunk_size=None, context=None):
location = location.store_location
@ -469,6 +472,7 @@ class BaseStore(driver.Store):
LOG.exception(msg % {'container': container,
'chunk': chunk})
@capabilities.check
def add(self, image_id, image_file, image_size,
connection=None, context=None):
location = self.create_location(image_id, context=context)
@ -597,6 +601,7 @@ class BaseStore(driver.Store):
LOG.error(msg)
raise glance_store.BackendException(msg)
@capabilities.check
def delete(self, location, connection=None, context=None):
location = location.store_location
if not connection:

View File

@ -29,6 +29,7 @@ from oslo_utils import units
import six.moves.urllib.parse as urlparse
import glance_store
from glance_store import capabilities
from glance_store import exceptions
from glance_store.i18n import _
from glance_store.i18n import _LE
@ -220,6 +221,7 @@ class StoreLocation(location.StoreLocation):
class Store(glance_store.Store):
"""An implementation of the VMware datastore adapter."""
_CAPABILITIES = capabilities.RW_ACCESS
OPTIONS = _VMWARE_OPTS
WRITE_CHUNKSIZE = units.Mi
# FIXME(arnaud): re-visit this code once the store API is cleaned up.
@ -300,6 +302,7 @@ class Store(glance_store.Store):
cookie = list(vim_cookies)[0]
return cookie.name + '=' + cookie.value
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
@ -362,6 +365,7 @@ class Store(glance_store.Store):
return (loc.get_uri(), image_file.size,
image_file.checksum.hexdigest(), {})
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""Takes a `glance_store.location.Location` object that indicates
where to find the image file, and returns a tuple of generator
@ -392,6 +396,7 @@ class Store(glance_store.Store):
"""
return self._query(location, 'HEAD')[2]
@capabilities.check
def delete(self, location, context=None):
"""Takes a `glance_store.location.Location` object that indicates
where to find the image file to delete

View File

@ -19,6 +19,7 @@ from oslo.config import cfg
from stevedore import driver
from stevedore import extension
from glance_store import capabilities
from glance_store.common import utils
from glance_store import exceptions
from glance_store import i18n
@ -36,7 +37,15 @@ _STORE_OPTS = [
cfg.StrOpt('default_store', default='file',
help=_("Default scheme to use to store image data. The "
"scheme must be registered by one of the stores "
"defined by the 'stores' config option."))
"defined by the 'stores' config option.")),
cfg.IntOpt('store_capabilities_update_min_interval', default=0,
help=_("Minimum interval in seconds to execute updating "
"dynamic storage capabilities based on current "
"backend status. It's not a periodic routine: the "
"update logic will be executed only when the "
"interval has elapsed and a store operation has "
"been triggered. The feature is enabled only when "
"the option value is greater than zero."))
]
_STORE_CFG_GROUP = 'glance_store'
@ -142,9 +151,9 @@ def _load_store(conf, store_entry, invoke_load=True):
invoke_args=[conf],
invoke_on_load=invoke_load)
return mgr.driver
except RuntimeError:
except RuntimeError as e:
LOG.warn("Failed to load driver %(driver)s."
"The driver will be disabled" % dict(driver=driver))
"The driver will be disabled" % dict(driver=str([driver, e])))
def _load_stores(conf):
@ -186,11 +195,12 @@ def create_stores(conf=CONF):
store_entry, schemes)
scheme_map = {}
loc_cls = store_instance.get_store_location_class()
for scheme in schemes:
loc_cls = store_instance.get_store_location_class()
scheme_map[scheme] = {
'store': store_instance,
'location_class': loc_cls,
'store_entry': store_entry
}
location.register_scheme_map(scheme_map)
store_count += 1
@ -220,7 +230,26 @@ def get_store_from_scheme(scheme):
if scheme not in location.SCHEME_TO_CLS_MAP:
raise exceptions.UnknownScheme(scheme=scheme)
scheme_info = location.SCHEME_TO_CLS_MAP[scheme]
return scheme_info['store']
store = scheme_info['store']
if not store.is_capable(capabilities.DRIVER_REUSABLE):
# Driver instance isn't stateless, so it can't
# be reused safely and needs recreation.
store_entry = scheme_info['store_entry']
store = _load_store(store.conf, store_entry, invoke_load=True)
store.configure()
try:
scheme_map = {}
loc_cls = store.get_store_location_class()
for scheme in store.get_schemes():
scheme_map[scheme] = {
'store': store,
'location_class': loc_cls,
'store_entry': store_entry
}
location.register_scheme_map(scheme_map)
except NotImplementedError:
scheme_info['store'] = store
return store
def get_store_from_uri(uri):
@ -240,12 +269,9 @@ def get_from_backend(uri, offset=0, chunk_size=None, context=None):
loc = location.get_location_from_uri(uri, conf=CONF)
store = get_store_from_uri(uri)
try:
return store.get(loc, offset=offset,
chunk_size=chunk_size,
context=context)
except NotImplementedError:
raise exceptions.StoreGetNotSupported
return store.get(loc, offset=offset,
chunk_size=chunk_size,
context=context)
def get_size_from_backend(uri, context=None):
@ -253,7 +279,6 @@ def get_size_from_backend(uri, context=None):
loc = location.get_location_from_uri(uri, conf=CONF)
store = get_store_from_uri(uri)
return store.get_size(loc, context=context)
@ -262,11 +287,7 @@ def delete_from_backend(uri, context=None):
loc = location.get_location_from_uri(uri, conf=CONF)
store = get_store_from_uri(uri)
try:
return store.delete(loc, context=context)
except NotImplementedError:
raise exceptions.StoreDeleteNotSupported
return store.delete(loc, context=context)
def get_store_from_location(uri):
@ -340,10 +361,7 @@ def add_to_backend(conf, image_id, data, size, scheme=None, context=None):
if scheme is None:
scheme = conf['glance_store']['default_store']
store = get_store_from_scheme(scheme)
try:
return store_add_to_backend(image_id, data, size, store, context)
except NotImplementedError:
raise exceptions.StoreAddNotSupported
return store_add_to_backend(image_id, data, size, store, context)
def set_acls(location_uri, public=False, read_tenants=[],
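
For operators, a hypothetical glance-api.conf fragment (the store list
and the interval are chosen only for illustration) that enables the
throttled dynamic-capability refresh added above:

[glance_store]
stores = file,http
default_store = file
# Refresh dynamic capabilities at most once every 30 seconds, and only
# when a store operation arrives after the interval has elapsed;
# 0 (the default) leaves the feature disabled.
store_capabilities_update_min_interval = 30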

View File

@ -0,0 +1,210 @@
# Copyright (c) 2015 IBM, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Glance Store capability"""
import logging
import threading
import time
from eventlet import tpool
from glance_store import exceptions
from glance_store import i18n
_LW = i18n._LW
_STORE_CAPABILITES_UPDATE_SCHEDULING_BOOK = {}
_STORE_CAPABILITES_UPDATE_SCHEDULING_LOCK = threading.Lock()
LOG = logging.getLogger(__name__)
# Store capability constants
NONE = 0b00000000
ALL = 0b11111111
READ_ACCESS = 0b00000001
READ_OFFSET = 0b00000011 # Included READ_ACCESS
READ_CHUNK = 0b00000101 # Included READ_ACCESS
READ_RANDOM = 0b00000111 # READ_OFFSET | READ_CHUNK
WRITE_ACCESS = 0b00001000
WRITE_OFFSET = 0b00011000 # Included WRITE_ACCESS
WRITE_CHUNK = 0b00101000 # Included WRITE_ACCESS
WRITE_RANDOM = 0b00111000 # WRITE_OFFSET | WRITE_CHUNK
RW_ACCESS = 0b00001001 # READ_ACCESS | WRITE_ACCESS
RW_OFFSET = 0b00011011 # READ_OFFSET | WRITE_OFFSET
RW_CHUNK = 0b00101101 # READ_CHUNK | WRITE_CHUNK
RW_RANDOM = 0b00111111 # RW_OFFSET | RW_CHUNK
DRIVER_REUSABLE = 0b01000000 # driver is stateless and can be reused safely
class StoreCapability(object):
def __init__(self):
# Set static store capabilities based on
# the current driver implementation.
self._capabilities = getattr(self.__class__, "_CAPABILITIES", 0)
@property
def capabilities(self):
return self._capabilities
@staticmethod
def contains(x, y):
return x & y == y
def update_capabilities(self):
"""
Update dynamic storage capabilities based on current
driver configuration and backend status when needed.
As a hook, the function is triggered in two cases: once
after the store driver has been configured, to update
dynamic storage capabilities based on the current driver
configuration, and each time the capability check of an
operation fails, to refresh dynamic storage capabilities
based on the current backend status.
This function should not raise any exception.
"""
LOG.debug(("Store %s doesn't support updating dynamic "
"storage capabilities. Please overwrite "
"'update_capabilities' method of the store to "
"implement updating logics if needed.") %
self.__class__.__name__)
def is_capable(self, *capabilities):
"""
Check if requested capability(s) are supported by
current driver instance.
:param capabilities: required capability(s).
"""
caps = 0
for cap in capabilities:
caps |= int(cap)
return self.contains(self.capabilities, caps)
def set_capabilities(self, *dynamic_capabilites):
"""
Set dynamic storage capabilities based on current
driver configuration and backend status.
:param dynamic_capabilites: dynamic storage capability(s).
"""
for cap in dynamic_capabilites:
self._capabilities |= int(cap)
def unset_capabilities(self, *dynamic_capabilites):
"""
Unset dynamic storage capabilities.
:param dynamic_capabilites: dynamic storage capability(s).
"""
caps = 0
for cap in dynamic_capabilites:
caps |= int(cap)
# TODO(zhiyan): Cascaded capability removal is
# skipped currently, we can add it back later
# when a concrete requirement comes out.
# For example, when removing READ_ACCESS, all
# read related capabilities need to be removed
# together, e.g. READ_RANDOM.
self._capabilities &= ~caps
def _schedule_capabilities_update(store):
def _update_capabilities(store, context):
with context['lock']:
if context['updating']:
return
context['updating'] = True
try:
store.update_capabilities()
except Exception:
pass
finally:
context['updating'] = False
# NOTE(zhiyan): Update the 'latest_update' field
# in any case, even if an exception was raised,
# to prevent calling a problematic routine repeatedly.
context['latest_update'] = int(time.time())
global _STORE_CAPABILITES_UPDATE_SCHEDULING_BOOK
book = _STORE_CAPABILITES_UPDATE_SCHEDULING_BOOK
if store not in book:
with _STORE_CAPABILITES_UPDATE_SCHEDULING_LOCK:
if store not in book:
book[store] = {'latest_update': int(time.time()),
'lock': threading.Lock(),
'updating': False}
else:
context = book[store]
# NOTE(zhiyan): We don't need to lock the 'latest_update'
# field for this check since time only moves forward.
sec = (int(time.time()) - context['latest_update'] -
store.conf.glance_store.store_capabilities_update_min_interval)
if sec >= 0:
if not context['updating']:
# NOTE(zhiyan): Using a real thread pool instead
# of a green pool because updating store capabilities
# will probably call code that blocks on I/O against
# remote or local storage.
# Eventlet allows the operator to use the environment
# variable EVENTLET_THREADPOOL_SIZE to set the pool size.
tpool.execute(_update_capabilities, store, context)
def check(store_op_fun):
def op_checker(store, *args, **kwargs):
# NOTE(zhiyan): Trigger the hook of updating store
# dynamic capabilities based on current store status.
if store.conf.glance_store.store_capabilities_update_min_interval > 0:
_schedule_capabilities_update(store)
op_cap_map = {
'get': [READ_ACCESS,
READ_OFFSET if kwargs.get('offset') else NONE,
READ_CHUNK if kwargs.get('chunk_size') else NONE],
'add': [WRITE_ACCESS],
'delete': [WRITE_ACCESS]}
op_exec_map = {
'get': (exceptions.StoreRandomGetNotSupported
if kwargs.get('offset') or kwargs.get('chunk_size') else
exceptions.StoreGetNotSupported),
'add': exceptions.StoreAddDisabled,
'delete': exceptions.StoreDeleteNotSupported}
op = store_op_fun.__name__.lower()
try:
req_cap = op_cap_map[op]
except KeyError:
LOG.warn(_LW('The capability of operation "%s" '
'could not be checked.' % op))
else:
if not store.is_capable(*req_cap):
kwargs.setdefault('offset', 0)
kwargs.setdefault('chunk_size', None)
raise op_exec_map[op](**kwargs)
return store_op_fun(store, *args, **kwargs)
return op_checker
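
To show how the update_capabilities() hook above is meant to be used, a
hypothetical driver sketch follows; the store name and the writability
probe are invented purely for illustration, while StoreCapability,
set_capabilities() and unset_capabilities() are the APIs added here.

from glance_store import capabilities


class FakeNFSStore(capabilities.StoreCapability):

    _CAPABILITIES = capabilities.READ_ACCESS

    def _backend_is_writable(self):
        # Placeholder for a real probe, e.g. checking an NFS mount.
        return True

    def update_capabilities(self):
        # Called once after configure() and again, throttled by
        # store_capabilities_update_min_interval, whenever a capability
        # check fails; mirrors the backend status into WRITE_ACCESS.
        if self._backend_is_writable():
            self.set_capabilities(capabilities.WRITE_ACCESS)
        else:
            self.unset_capabilities(capabilities.WRITE_ACCESS)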

View File

@ -21,14 +21,16 @@ import logging
from oslo.config import cfg
from oslo.utils import importutils
from glance_store import capabilities
from glance_store.common import utils
from glance_store import exceptions
from glance_store.i18n import _
from glance_store import i18n
_ = i18n._
LOG = logging.getLogger(__name__)
class Store(object):
class Store(capabilities.StoreCapability):
OPTIONS = None
READ_CHUNKSIZE = 16 * (1024 * 1024) # 16M
@ -38,6 +40,9 @@ class Store(object):
"""
Initialize the Store
"""
super(Store, self).__init__()
self.conf = conf
self.store_location_class = None
@ -55,21 +60,23 @@ class Store(object):
def configure(self):
"""
Configure the Store to use the stored configuration options
Configure the store to use the stored configuration options
and initialize capabilities based on current configuration.
Any store that needs special configuration should implement
this method.
"""
try:
self.configure_add()
self.add = getattr(self, '_add', self.add)
except exceptions.BadStoreConfiguration as e:
self._add = self.add
self.add = self.add_disabled
self.unset_capabilities(capabilities.WRITE_ACCESS)
msg = (_(u"Failed to configure store correctly: %s "
"Disabling add method.") % utils.exception_to_str(e))
LOG.warn(msg)
self.update_capabilities()
def get_schemes(self):
"""
Returns a tuple of schemes which this store can handle.
@ -96,6 +103,7 @@ class Store(object):
"""
# NOTE(flaper87): This should probably go away
@capabilities.check
def get(self, location, offset=0, chunk_size=None, context=None):
"""
Takes a `glance_store.location.Location` object that indicates
@ -119,14 +127,7 @@ class Store(object):
"""
raise NotImplementedError
def add_disabled(self, *args, **kwargs):
"""
Add method that raises an exception because the Store was
not able to be configured properly and therefore the add()
method would error out.
"""
raise exceptions.StoreAddDisabled
@capabilities.check
def add(self, image_id, image_file, image_size, context=None):
"""
Stores an image file with supplied identifier to the backend
@ -144,6 +145,7 @@ class Store(object):
"""
raise NotImplementedError
@capabilities.check
def delete(self, location, context=None):
"""
Takes a `glance_store.location.Location` object that indicates

View File

@ -146,8 +146,9 @@ class StoreGetNotSupported(GlanceStoreException):
message = _("Getting images from this store is not supported.")
class StoreAddNotSupported(GlanceStoreException):
message = _("Adding images to this store is not supported.")
class StoreRandomGetNotSupported(StoreGetNotSupported):
message = _("Getting images randomly from this store is not supported. "
"Offset: %(offset)s, length: %(chunk_size)s")
class StoreAddDisabled(GlanceStoreException):

View File

@ -68,14 +68,15 @@ class StoreBaseTest(base.BaseTestCase):
for k, v in kw.iteritems():
self.conf.set_override(k, v, group)
def register_store_schemes(self, store):
def register_store_schemes(self, store, store_entry):
schemes = store.get_schemes()
scheme_map = {}
loc_cls = store.get_store_location_class()
for scheme in schemes:
loc_cls = store.get_store_location_class()
scheme_map[scheme] = {
'store': store,
'location_class': loc_cls,
'store_entry': store_entry
}
location.register_scheme_map(scheme_map)

View File

@ -15,11 +15,12 @@
import mock
import glance_store
from glance_store._drivers import cinder
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from tests.unit import test_store_capabilities
class FakeObject(object):
@ -28,13 +29,14 @@ class FakeObject(object):
setattr(self, name, value)
class TestCinderStore(base.StoreBaseTest):
class TestCinderStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
super(TestCinderStore, self).setUp()
self.store = cinder.Store(self.conf)
self.store.configure()
self.register_store_schemes(self.store)
self.register_store_schemes(self.store, 'cinder')
def test_cinder_configure_add(self):
self.assertRaises(exceptions.BadStoreConfiguration,
@ -69,3 +71,18 @@ class TestCinderStore(base.StoreBaseTest):
image_size = self.store.get_size(loc, context=fake_context)
self.assertEqual(image_size,
fake_volumes.values()[0].size * (1024 ** 3))
def test_cinder_delete_raise_error(self):
uri = 'cinder://12345678-9012-3455-6789-012345678901'
loc = location.get_location_from_uri(uri, conf=self.conf)
self.assertRaises(exceptions.StoreDeleteNotSupported,
self.store.delete, loc)
self.assertRaises(exceptions.StoreDeleteNotSupported,
glance_store.delete_from_backend, uri, {})
def test_cinder_add_raise_error(self):
self.assertRaises(exceptions.StoreAddDisabled,
self.store.add, None, None, None, None)
self.assertRaises(exceptions.StoreAddDisabled,
glance_store.add_to_backend, None, None,
None, None, 'cinder')

View File

@ -34,12 +34,14 @@ from glance_store._drivers.filesystem import Store
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from tests.unit import test_store_capabilities
KB = 1024
class TestStore(base.StoreBaseTest):
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""

View File

@ -19,6 +19,7 @@ import mock
from glance_store._drivers import gridfs as gfs
from glance_store.tests import base
from tests.unit import test_store_capabilities
try:
import gridfs
@ -75,7 +76,8 @@ class FakeGridFS(object):
return Image
class TestStore(base.StoreBaseTest):
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""

View File

@ -15,15 +15,17 @@
import mock
import glance_store
from glance_store._drivers import http
from glance_store import delete_from_backend
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from glance_store.tests import utils
from tests.unit import test_store_capabilities
class TestHttpStore(base.StoreBaseTest):
class TestHttpStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
super(TestHttpStore, self).setUp()
@ -121,9 +123,17 @@ class TestHttpStore(base.StoreBaseTest):
self._mock_httplib()
uri = "https://netloc/path/to/file.tar.gz"
loc = location.get_location_from_uri(uri, conf=self.conf)
self.assertRaises(NotImplementedError, self.store.delete, loc)
self.assertRaises(exceptions.StoreDeleteNotSupported,
delete_from_backend, uri, {})
self.store.delete, loc)
self.assertRaises(exceptions.StoreDeleteNotSupported,
glance_store.delete_from_backend, uri, {})
def test_http_add_raise_error(self):
self.assertRaises(exceptions.StoreAddDisabled,
self.store.add, None, None, None, None)
self.assertRaises(exceptions.StoreAddDisabled,
glance_store.add_to_backend, None, None,
None, None, 'file')
def test_http_get_size_with_non_existent_image_raises_Not_Found(self):
self._mock_httplib()

View File

@ -58,6 +58,7 @@ class OptsTestCase(base.StoreBaseTest):
expected_opt_names = [
'default_store',
'stores',
'store_capabilities_update_min_interval',
'cinder_api_insecure',
'cinder_ca_certificates_file',
'cinder_catalog_info',

View File

@ -21,6 +21,7 @@ from glance_store._drivers import rbd as rbd_store
from glance_store import exceptions
from glance_store.location import Location
from glance_store.tests import base
from tests.unit import test_store_capabilities
class MockRados(object):
@ -141,7 +142,9 @@ class MockRBD(object):
raise NotImplementedError()
class TestStore(base.StoreBaseTest):
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""
super(TestStore, self).setUp()

View File

@ -25,9 +25,11 @@ import mock
from oslo_utils import units
from glance_store._drivers import s3
from glance_store import capabilities
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from tests.unit import test_store_capabilities
FAKE_UUID = str(uuid.uuid4())
@ -260,7 +262,8 @@ def format_s3_location(user, key, authurl, bucket, obj):
bucket, obj)
class TestStore(base.StoreBaseTest):
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""
@ -268,7 +271,7 @@ class TestStore(base.StoreBaseTest):
self.store = s3.Store(self.conf)
self.config(**S3_CONF)
self.store.configure()
self.register_store_schemes(self.store)
self.register_store_schemes(self.store, 's3')
fctor, fbucket = fakers()
@ -486,7 +489,7 @@ class TestStore(base.StoreBaseTest):
self.config(**conf)
self.store = s3.Store(self.conf)
self.store.configure()
return self.store.add == self.store.add_disabled
return not self.store.is_capable(capabilities.WRITE_ACCESS)
except Exception:
return False
return False

View File

@ -20,9 +20,11 @@ from oslo_concurrency import processutils
from glance_store._drivers import sheepdog
from glance_store.tests import base
from tests.unit import test_store_capabilities
class TestSheepdogStore(base.StoreBaseTest):
class TestSheepdogStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
def setUp(self):
"""Establish a clean test environment."""

View File

@ -0,0 +1,143 @@
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance_store import capabilities as caps
from glance_store.tests import base
class FakeStoreWithStaticCapabilities(caps.StoreCapability):
_CAPABILITIES = caps.READ_RANDOM | caps.DRIVER_REUSABLE
class FakeStoreWithDynamicCapabilities(caps.StoreCapability):
def __init__(self, *cap_list):
super(FakeStoreWithDynamicCapabilities, self).__init__()
if not cap_list:
cap_list = [caps.READ_RANDOM, caps.DRIVER_REUSABLE]
self.set_capabilities(*cap_list)
class FakeStoreWithMixedCapabilities(caps.StoreCapability):
_CAPABILITIES = caps.READ_RANDOM
def __init__(self):
super(FakeStoreWithMixedCapabilities, self).__init__()
self.set_capabilities(caps.DRIVER_REUSABLE)
class TestStoreCapabilitiesChecking(object):
def test_store_capabilities_checked_on_io_operations(self):
self.assertEqual('op_checker', self.store.add.__name__)
self.assertEqual('op_checker', self.store.get.__name__)
self.assertEqual('op_checker', self.store.delete.__name__)
class TestStoreCapabilities(base.StoreBaseTest):
def _verify_store_capabilities(self, store):
# This function tests is_capable() as well.
self.assertTrue(store.is_capable(caps.READ_RANDOM))
self.assertTrue(store.is_capable(caps.DRIVER_REUSABLE))
self.assertFalse(store.is_capable(caps.WRITE_ACCESS))
def test_static_capabilities_setup(self):
self._verify_store_capabilities(FakeStoreWithStaticCapabilities())
def test_dynamic_capabilities_setup(self):
self._verify_store_capabilities(FakeStoreWithDynamicCapabilities())
def test_mixed_capabilities_setup(self):
self._verify_store_capabilities(FakeStoreWithMixedCapabilities())
def test_set_unset_capabilities(self):
store = FakeStoreWithStaticCapabilities()
self.assertFalse(store.is_capable(caps.WRITE_ACCESS))
# Set and unset single capability on one time
store.set_capabilities(caps.WRITE_ACCESS)
self.assertTrue(store.is_capable(caps.WRITE_ACCESS))
store.unset_capabilities(caps.WRITE_ACCESS)
self.assertFalse(store.is_capable(caps.WRITE_ACCESS))
# Set and unset multiple capabilities on one time
cap_list = [caps.WRITE_ACCESS, caps.WRITE_OFFSET]
store.set_capabilities(*cap_list)
self.assertTrue(store.is_capable(*cap_list))
store.unset_capabilities(*cap_list)
self.assertFalse(store.is_capable(*cap_list))
def test_store_capabilities_property(self):
store1 = FakeStoreWithDynamicCapabilities()
self.assertTrue(hasattr(store1, 'capabilities'))
store2 = FakeStoreWithMixedCapabilities()
self.assertEqual(store1.capabilities, store2.capabilities)
def test_cascaded_unset_capabilities(self):
# Test read capability
store = FakeStoreWithMixedCapabilities()
self._verify_store_capabilities(store)
store.unset_capabilities(caps.READ_ACCESS)
cap_list = [caps.READ_ACCESS, caps.READ_OFFSET,
caps.READ_CHUNK, caps.READ_RANDOM]
for cap in cap_list:
# Make sure all of them are unset.
self.assertFalse(store.is_capable(cap))
self.assertTrue(store.is_capable(caps.DRIVER_REUSABLE))
# Test write capability
store = FakeStoreWithDynamicCapabilities(caps.WRITE_RANDOM,
caps.DRIVER_REUSABLE)
self.assertTrue(store.is_capable(caps.WRITE_RANDOM))
self.assertTrue(store.is_capable(caps.DRIVER_REUSABLE))
store.unset_capabilities(caps.WRITE_ACCESS)
cap_list = [caps.WRITE_ACCESS, caps.WRITE_OFFSET,
caps.WRITE_CHUNK, caps.WRITE_RANDOM]
for cap in cap_list:
# Make sure all of them are unset.
self.assertFalse(store.is_capable(cap))
self.assertTrue(store.is_capable(caps.DRIVER_REUSABLE))
class TestStoreCapabilityConstants(base.StoreBaseTest):
def test_one_single_capability_own_one_bit(self):
cap_list = [
caps.READ_ACCESS,
caps.WRITE_ACCESS,
caps.DRIVER_REUSABLE,
]
for cap in cap_list:
self.assertEqual(1, bin(cap).count('1'))
def test_combined_capability_bits(self):
check = caps.StoreCapability.contains
check(caps.READ_OFFSET, caps.READ_ACCESS)
check(caps.READ_CHUNK, caps.READ_ACCESS)
check(caps.READ_RANDOM, caps.READ_CHUNK)
check(caps.READ_RANDOM, caps.READ_OFFSET)
check(caps.WRITE_OFFSET, caps.WRITE_ACCESS)
check(caps.WRITE_CHUNK, caps.WRITE_ACCESS)
check(caps.WRITE_RANDOM, caps.WRITE_CHUNK)
check(caps.WRITE_RANDOM, caps.WRITE_OFFSET)
check(caps.RW_ACCESS, caps.READ_ACCESS)
check(caps.RW_ACCESS, caps.WRITE_ACCESS)
check(caps.RW_OFFSET, caps.READ_OFFSET)
check(caps.RW_OFFSET, caps.WRITE_OFFSET)
check(caps.RW_CHUNK, caps.READ_CHUNK)
check(caps.RW_CHUNK, caps.WRITE_CHUNK)
check(caps.RW_RANDOM, caps.READ_RANDOM)
check(caps.RW_RANDOM, caps.WRITE_RANDOM)

View File

@ -34,12 +34,15 @@ import swiftclient
from glance_store._drivers.swift import store as swift
from glance_store import backend
from glance_store import BackendException
from glance_store import capabilities
from glance_store.common import auth
from glance_store.common import utils
from glance_store import exceptions
from glance_store import location
from glance_store.openstack.common import context
from glance_store.tests import base
from tests.unit import test_store_capabilities
CONF = cfg.CONF
@ -248,7 +251,8 @@ class SwiftTests(object):
(self.swift_store_user, FAKE_UUID))
self.config(swift_store_multi_tenant=True)
# NOTE(markwash): ensure the image is found
size = backend.get_size_from_backend(uri, context={})
ctxt = context.RequestContext()
size = backend.get_size_from_backend(uri, context=ctxt)
self.assertEqual(size, 5120)
def test_get(self):
@ -757,7 +761,7 @@ class SwiftTests(object):
try:
self.config(**conf)
self.store = Store(self.conf)
return self.store.add == self.store.add_disabled
return not self.store.is_capable(capabilities.WRITE_ACCESS)
except Exception:
return False
return False
@ -771,7 +775,7 @@ class SwiftTests(object):
'authurl.com', 'user': '',
'key': ''}}
self.store.configure()
self.assertEqual(self.store.add, self.store.add_disabled)
self.assertFalse(self.store.is_capable(capabilities.WRITE_ACCESS))
def test_no_auth_address(self):
"""
@ -782,12 +786,18 @@ class SwiftTests(object):
'', 'user': 'user1',
'key': 'key1'}}
self.store.configure()
self.assertEqual(self.store.add, self.store.add_disabled)
self.assertFalse(self.store.is_capable(capabilities.WRITE_ACCESS))
def test_delete(self):
"""
Test we can delete an existing image in the swift store
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
uri = "swift://%s:key@authurl/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = location.get_location_from_uri(uri, conf=self.conf)
@ -799,6 +809,12 @@ class SwiftTests(object):
"""
Test we can delete an existing image in the swift store
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
uri = "swift+config://ref1/glance/%s" % (FAKE_UUID)
loc = location.get_location_from_uri(uri, conf=self.conf)
self.store.delete(loc)
@ -810,6 +826,12 @@ class SwiftTests(object):
Test that trying to delete a swift that doesn't exist
raises an error
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
loc = location.get_location_from_uri(
"swift://%s:key@authurl/glance/noexist" % (self.swift_store_user),
conf=self.conf)
@ -843,6 +865,12 @@ class SwiftTests(object):
else:
pass
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
reload(swift)
self.store = Store(self.conf)
self.store.configure()
loc_uri = "swift+https://%s:key@localhost:8080/glance/%s"
loc_uri = loc_uri % (self.swift_store_user, test_image_id)
loc = location.get_location_from_uri(loc_uri)
@ -910,7 +938,8 @@ class SwiftTests(object):
'frank:*,jim:*')
class TestStoreAuthV1(base.StoreBaseTest, SwiftTests):
class TestStoreAuthV1(base.StoreBaseTest, SwiftTests,
test_store_capabilities.TestStoreCapabilitiesChecking):
_CONF = cfg.CONF
@ -935,7 +964,7 @@ class TestStoreAuthV1(base.StoreBaseTest, SwiftTests):
self.store = Store(self.conf)
self.config(**conf)
self.store.configure()
self.register_store_schemes(self.store)
self.register_store_schemes(self.store, 'swift')
self.addCleanup(self.conf.reset)
@ -1166,7 +1195,7 @@ class TestMultiTenantStoreContext(base.StoreBaseTest):
self.store = Store(self.conf)
self.config(**conf)
self.store.configure()
self.register_store_schemes(self.store)
self.register_store_schemes(self.store, 'swift')
self.service_catalog = [{
"name": "Object Storage",
"type": "object-store",

View File

@ -29,6 +29,7 @@ from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from glance_store.tests import utils
from tests.unit import test_store_capabilities
FAKE_UUID = str(uuid.uuid4())
@ -78,7 +79,8 @@ class FakeHTTPConnection(object):
pass
class TestStore(base.StoreBaseTest):
class TestStore(base.StoreBaseTest,
test_store_capabilities.TestStoreCapabilitiesChecking):
@mock.patch('oslo.vmware.api.VMwareAPISession', autospec=True)
def setUp(self, mock_session):