Multi store support for http, swift, sheepdog and vmware driver
Added multi store support for http, swift, sheepdog and vmware driver. The default behavior is maintained for backward compatibility. DocImpact Partial-Implements: bp multi-store Change-Id: I93ccdafc6e740065ff4ca3adc6b49eb82e8afa10
This commit is contained in:
parent
87114c8ec7
commit
2636a4121b
@ -287,7 +287,8 @@ class Store(glance_store.driver.Store):
|
||||
self.conf,
|
||||
uri=url,
|
||||
image_id=image_id,
|
||||
store_specs=store_specs)
|
||||
store_specs=store_specs,
|
||||
backend=self.backend_group)
|
||||
|
||||
@staticmethod
|
||||
def _check_store_uri(conn, loc):
|
||||
@ -317,9 +318,15 @@ class Store(glance_store.driver.Store):
|
||||
def _get_response(self, location, verb):
|
||||
if not hasattr(self, 'session'):
|
||||
self.session = requests.Session()
|
||||
ca_bundle = self.conf.glance_store.https_ca_certificates_file
|
||||
disable_https = self.conf.glance_store.https_insecure
|
||||
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
ca_bundle = store_conf.https_ca_certificates_file
|
||||
disable_https = store_conf.https_insecure
|
||||
self.session.verify = ca_bundle if ca_bundle else not disable_https
|
||||
self.session.proxies = self.conf.glance_store.http_proxy_information
|
||||
self.session.proxies = store_conf.http_proxy_information
|
||||
return self.session.request(verb, location.get_uri(), stream=True,
|
||||
allow_redirects=False)
|
||||
|
@ -230,9 +230,14 @@ class StoreLocation(glance_store.location.StoreLocation):
|
||||
self.addr = pieces[0]
|
||||
# This is used for backwards compatibility.
|
||||
else:
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
self.image = pieces[0]
|
||||
self.port = self.conf.glance_store.sheepdog_store_port
|
||||
self.addr = self.conf.glance_store.sheepdog_store_address
|
||||
self.port = store_conf.sheepdog_store_port
|
||||
self.addr = store_conf.sheepdog_store_address
|
||||
|
||||
|
||||
class ImageIterator(object):
|
||||
@ -272,15 +277,19 @@ class Store(glance_store.driver.Store):
|
||||
this method. If the store was not able to successfully configure
|
||||
itself, it should raise `exceptions.BadStoreConfiguration`
|
||||
"""
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
try:
|
||||
chunk_size = self.conf.glance_store.sheepdog_store_chunk_size
|
||||
chunk_size = store_conf.sheepdog_store_chunk_size
|
||||
self.chunk_size = chunk_size * units.Mi
|
||||
self.READ_CHUNKSIZE = self.chunk_size
|
||||
self.WRITE_CHUNKSIZE = self.READ_CHUNKSIZE
|
||||
|
||||
self.addr = self.conf.glance_store.sheepdog_store_address
|
||||
self.port = self.conf.glance_store.sheepdog_store_port
|
||||
self.addr = store_conf.sheepdog_store_address
|
||||
self.port = store_conf.sheepdog_store_port
|
||||
except cfg.ConfigFileValueError as e:
|
||||
reason = _("Error in store configuration: %s") % e
|
||||
LOG.error(reason)
|
||||
@ -362,7 +371,7 @@ class Store(glance_store.driver.Store):
|
||||
'image': image_id,
|
||||
'addr': self.addr,
|
||||
'port': self.port
|
||||
}, self.conf)
|
||||
}, self.conf, backend_group=self.backend_group)
|
||||
|
||||
image.create(image_size)
|
||||
|
||||
@ -389,7 +398,11 @@ class Store(glance_store.driver.Store):
|
||||
with excutils.save_and_reraise_exception():
|
||||
image.delete()
|
||||
|
||||
return (location.get_uri(), offset, checksum.hexdigest(), {})
|
||||
metadata = {}
|
||||
if self.backend_group:
|
||||
metadata['backend'] = u"%s" % self.backend_group
|
||||
|
||||
return (location.get_uri(), offset, checksum.hexdigest(), metadata)
|
||||
|
||||
@capabilities.check
|
||||
def delete(self, location, context=None):
|
||||
|
@ -90,15 +90,21 @@ class BufferedReader(object):
|
||||
to ensure there is enough disk space available.
|
||||
"""
|
||||
|
||||
def __init__(self, fd, checksum, total, verifier=None):
|
||||
def __init__(self, fd, checksum, total, verifier=None, backend_group=None):
|
||||
self.fd = fd
|
||||
self.total = total
|
||||
self.checksum = checksum
|
||||
self.verifier = verifier
|
||||
self.backend_group = backend_group
|
||||
# maintain a pointer to use to update checksum and verifier
|
||||
self.update_position = 0
|
||||
|
||||
if self.backend_group:
|
||||
buffer_dir = getattr(CONF,
|
||||
self.backend_group).swift_upload_buffer_dir
|
||||
else:
|
||||
buffer_dir = CONF.glance_store.swift_upload_buffer_dir
|
||||
|
||||
self._tmpfile = tempfile.TemporaryFile(dir=buffer_dir)
|
||||
|
||||
self._buffered = False
|
||||
|
@ -83,9 +83,15 @@ class SwiftConnectionManager(object):
|
||||
auth_ref = self.client.session.auth.auth_ref
|
||||
# if connection token is going to expire soon (keystone checks
|
||||
# is token is going to expire or expired already)
|
||||
if auth_ref.will_expire_soon(
|
||||
self.store.conf.glance_store.swift_store_expire_soon_interval
|
||||
):
|
||||
if self.store.backend_group:
|
||||
interval = getattr(
|
||||
self.store.conf, self.store.backend_group
|
||||
).swift_store_expire_soon_interval
|
||||
else:
|
||||
store_conf = self.store.conf.glance_store
|
||||
interval = store_conf.swift_store_expire_soon_interval
|
||||
|
||||
if auth_ref.will_expire_soon(interval):
|
||||
LOG.info(_LI("Requesting new token for swift connection."))
|
||||
# request new token with session and client provided by store
|
||||
auth_token = self.client.session.get_auth_headers().get(
|
||||
|
@ -495,7 +495,13 @@ def swift_retry_iter(resp_iter, length, store, location, manager):
|
||||
retries = 0
|
||||
bytes_read = 0
|
||||
|
||||
while retries <= store.conf.glance_store.swift_store_retry_get_count:
|
||||
if store.backend_group:
|
||||
rcount = getattr(store.conf,
|
||||
store.backend_group).swift_store_retry_get_count
|
||||
else:
|
||||
rcount = store.conf.glance_store.swift_store_retry_get_count
|
||||
|
||||
while retries <= rcount:
|
||||
try:
|
||||
for chunk in resp_iter:
|
||||
yield chunk
|
||||
@ -506,20 +512,18 @@ def swift_retry_iter(resp_iter, length, store, location, manager):
|
||||
% encodeutils.exception_to_unicode(e))
|
||||
|
||||
if bytes_read != length:
|
||||
if retries == store.conf.glance_store.swift_store_retry_get_count:
|
||||
if retries == rcount:
|
||||
# terminate silently and let higher level decide
|
||||
LOG.error(_LE("Stopping Swift retries after %d "
|
||||
"attempts") % retries)
|
||||
break
|
||||
else:
|
||||
retries += 1
|
||||
glance_conf = store.conf.glance_store
|
||||
retry_count = glance_conf.swift_store_retry_get_count
|
||||
LOG.info(_LI("Retrying Swift connection "
|
||||
"(%(retries)d/%(max_retries)d) with "
|
||||
"range=%(start)d-%(end)d"),
|
||||
{'retries': retries,
|
||||
'max_retries': retry_count,
|
||||
'max_retries': rcount,
|
||||
'start': bytes_read,
|
||||
'end': length})
|
||||
(_resp_headers, resp_iter) = store._get_object(location,
|
||||
@ -578,6 +582,10 @@ class StoreLocation(location.StoreLocation):
|
||||
if not credentials_included:
|
||||
# Used only in case of an add
|
||||
# Get the current store from config
|
||||
if self.backend_group:
|
||||
store = getattr(self.conf,
|
||||
self.backend_group).default_swift_reference
|
||||
else:
|
||||
store = self.conf.glance_store.default_swift_reference
|
||||
|
||||
return '%s://%s/%s/%s' % ('swift+config', store, container, obj)
|
||||
@ -593,7 +601,8 @@ class StoreLocation(location.StoreLocation):
|
||||
|
||||
def _get_conf_value_from_account_ref(self, netloc):
|
||||
try:
|
||||
ref_params = sutils.SwiftParams(self.conf).params
|
||||
ref_params = sutils.SwiftParams(
|
||||
self.conf, backend=self.backend_group).params
|
||||
self.user = ref_params[netloc]['user']
|
||||
self.key = ref_params[netloc]['key']
|
||||
netloc = ref_params[netloc]['auth_address']
|
||||
@ -726,11 +735,21 @@ class StoreLocation(location.StoreLocation):
|
||||
return ''.join([auth_scheme, self.auth_or_store_url])
|
||||
|
||||
|
||||
def Store(conf):
|
||||
def Store(conf, backend=None):
|
||||
group = 'glance_store'
|
||||
if backend:
|
||||
group = backend
|
||||
multi_tenant = getattr(conf, backend).swift_store_multi_tenant
|
||||
default_store = conf.glance_store.default_backend
|
||||
else:
|
||||
default_store = conf.glance_store.default_store
|
||||
multi_tenant = conf.glance_store.swift_store_multi_tenant
|
||||
|
||||
# NOTE(dharinic): Multi-tenant store cannot work with swift config
|
||||
if conf.glance_store.swift_store_multi_tenant:
|
||||
if (conf.glance_store.default_store == 'swift+config' or
|
||||
sutils.is_multiple_swift_store_accounts_enabled(conf)):
|
||||
if multi_tenant:
|
||||
if (default_store == 'swift+config' or
|
||||
sutils.is_multiple_swift_store_accounts_enabled(
|
||||
conf, backend=backend)):
|
||||
msg = _("Swift multi-tenant store cannot be configured to "
|
||||
"work with swift+config. The options "
|
||||
"'swift_store_multi_tenant' and "
|
||||
@ -742,13 +761,13 @@ def Store(conf):
|
||||
reason=msg)
|
||||
try:
|
||||
conf.register_opts(_SWIFT_OPTS + sutils.swift_opts +
|
||||
buffered.BUFFERING_OPTS, group='glance_store')
|
||||
buffered.BUFFERING_OPTS, group=group)
|
||||
except cfg.DuplicateOptError:
|
||||
pass
|
||||
|
||||
if conf.glance_store.swift_store_multi_tenant:
|
||||
return MultiTenantStore(conf)
|
||||
return SingleTenantStore(conf)
|
||||
if multi_tenant:
|
||||
return MultiTenantStore(conf, backend=backend)
|
||||
return SingleTenantStore(conf, backend=backend)
|
||||
|
||||
Store.OPTIONS = _SWIFT_OPTS + sutils.swift_opts + buffered.BUFFERING_OPTS
|
||||
|
||||
@ -771,7 +790,11 @@ class BaseStore(driver.Store):
|
||||
return ('swift+https', 'swift', 'swift+http', 'swift+config')
|
||||
|
||||
def configure(self, re_raise_bsc=False):
|
||||
if self.backend_group:
|
||||
glance_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
glance_conf = self.conf.glance_store
|
||||
|
||||
_obj_size = self._option_get('swift_store_large_object_size')
|
||||
self.large_object_size = _obj_size * ONE_MB
|
||||
_chunk_size = self._option_get('swift_store_large_object_chunk_size')
|
||||
@ -821,10 +844,14 @@ class BaseStore(driver.Store):
|
||||
@capabilities.check
|
||||
def get(self, location, connection=None,
|
||||
offset=0, chunk_size=None, context=None):
|
||||
if self.backend_group:
|
||||
glance_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
glance_conf = self.conf.glance_store
|
||||
|
||||
location = location.store_location
|
||||
# initialize manager to receive valid connections
|
||||
allow_retry = \
|
||||
self.conf.glance_store.swift_store_retry_get_count > 0
|
||||
allow_retry = glance_conf.swift_store_retry_get_count > 0
|
||||
with self.get_manager(location, context,
|
||||
allow_reauth=allow_retry) as manager:
|
||||
(resp_headers, resp_body) = self._get_object(location,
|
||||
@ -855,7 +882,11 @@ class BaseStore(driver.Store):
|
||||
return 0
|
||||
|
||||
def _option_get(self, param):
|
||||
if self.backend_group:
|
||||
result = getattr(getattr(self.conf, self.backend_group), param)
|
||||
else:
|
||||
result = getattr(self.conf.glance_store, param)
|
||||
|
||||
if not result:
|
||||
reason = (_("Could not find %(param)s in configuration options.")
|
||||
% param)
|
||||
@ -940,8 +971,9 @@ class BaseStore(driver.Store):
|
||||
|
||||
chunk_name = "%s-%05d" % (location.obj, chunk_id)
|
||||
|
||||
with self.reader_class(image_file, checksum,
|
||||
chunk_size, verifier) as reader:
|
||||
with self.reader_class(
|
||||
image_file, checksum, chunk_size, verifier,
|
||||
backend_group=self.backend_group) as reader:
|
||||
if reader.is_zero_size is True:
|
||||
LOG.debug('Not writing zero-length chunk.')
|
||||
break
|
||||
@ -1004,12 +1036,18 @@ class BaseStore(driver.Store):
|
||||
# image data. We *really* should consider NOT returning
|
||||
# the location attribute from GET /images/<ID> and
|
||||
# GET /images/details
|
||||
if sutils.is_multiple_swift_store_accounts_enabled(self.conf):
|
||||
if sutils.is_multiple_swift_store_accounts_enabled(
|
||||
self.conf, backend=self.backend_group):
|
||||
include_creds = False
|
||||
else:
|
||||
include_creds = True
|
||||
|
||||
metadata = {}
|
||||
if self.backend_group:
|
||||
metadata['backend'] = u"%s" % self.backend_group
|
||||
|
||||
return (location.get_uri(credentials_included=include_creds),
|
||||
image_size, obj_etag, {})
|
||||
image_size, obj_etag, metadata)
|
||||
except swiftclient.ClientException as e:
|
||||
if e.http_status == http_client.CONFLICT:
|
||||
msg = _("Swift already has an image at this location")
|
||||
@ -1086,11 +1124,15 @@ class BaseStore(driver.Store):
|
||||
:param container: Name of container to create
|
||||
:param connection: Connection to swift service
|
||||
"""
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
try:
|
||||
connection.head_container(container)
|
||||
except swiftclient.ClientException as e:
|
||||
if e.http_status == http_client.NOT_FOUND:
|
||||
if self.conf.glance_store.swift_store_create_container_on_put:
|
||||
if store_conf.swift_store_create_container_on_put:
|
||||
try:
|
||||
msg = (_LI("Creating swift container %(container)s") %
|
||||
{'container': container})
|
||||
@ -1167,9 +1209,11 @@ class BaseStore(driver.Store):
|
||||
class SingleTenantStore(BaseStore):
|
||||
EXAMPLE_URL = "swift://<USER>:<KEY>@<AUTH_ADDRESS>/<CONTAINER>/<FILE>"
|
||||
|
||||
def __init__(self, conf):
|
||||
super(SingleTenantStore, self).__init__(conf)
|
||||
self.ref_params = sutils.SwiftParams(self.conf).params
|
||||
def __init__(self, conf, backend=None):
|
||||
super(SingleTenantStore, self).__init__(conf, backend=backend)
|
||||
self.backend_group = backend
|
||||
self.ref_params = sutils.SwiftParams(self.conf,
|
||||
backend=backend).params
|
||||
|
||||
def configure(self, re_raise_bsc=False):
|
||||
# set configuration before super so configure_add can override
|
||||
@ -1182,7 +1226,15 @@ class SingleTenantStore(BaseStore):
|
||||
super(SingleTenantStore, self).configure(re_raise_bsc=re_raise_bsc)
|
||||
|
||||
def configure_add(self):
|
||||
if self.backend_group:
|
||||
default_ref = getattr(self.conf,
|
||||
self.backend_group).default_swift_reference
|
||||
self.container = getattr(self.conf,
|
||||
self.backend_group).swift_store_container
|
||||
else:
|
||||
default_ref = self.conf.glance_store.default_swift_reference
|
||||
self.container = self.conf.glance_store.swift_store_container
|
||||
|
||||
default_swift_reference = self.ref_params.get(default_ref)
|
||||
if default_swift_reference:
|
||||
self.auth_address = default_swift_reference.get('auth_address')
|
||||
@ -1195,7 +1247,7 @@ class SingleTenantStore(BaseStore):
|
||||
self.scheme = 'swift+http'
|
||||
else:
|
||||
self.scheme = 'swift+https'
|
||||
self.container = self.conf.glance_store.swift_store_container
|
||||
|
||||
self.auth_version = default_swift_reference.get('auth_version')
|
||||
self.user = default_swift_reference.get('user')
|
||||
self.key = default_swift_reference.get('key')
|
||||
@ -1220,7 +1272,8 @@ class SingleTenantStore(BaseStore):
|
||||
'auth_or_store_url': self.auth_address,
|
||||
'user': self.user,
|
||||
'key': self.key}
|
||||
return StoreLocation(specs, self.conf)
|
||||
return StoreLocation(specs, self.conf,
|
||||
backend_group=self.backend_group)
|
||||
|
||||
def get_container_name(self, image_id, default_image_container):
|
||||
"""
|
||||
@ -1238,8 +1291,14 @@ class SingleTenantStore(BaseStore):
|
||||
:param default_image_container: container name from
|
||||
``swift_store_container``
|
||||
"""
|
||||
if self.backend_group:
|
||||
seed_num_chars = getattr(
|
||||
self.conf,
|
||||
self.backend_group).swift_store_multiple_containers_seed
|
||||
else:
|
||||
seed_num_chars = \
|
||||
self.conf.glance_store.swift_store_multiple_containers_seed
|
||||
|
||||
if seed_num_chars is None \
|
||||
or seed_num_chars < 0 or seed_num_chars > 32:
|
||||
reason = _("An integer value between 0 and 32 is required for"
|
||||
@ -1345,7 +1404,12 @@ class MultiTenantStore(BaseStore):
|
||||
EXAMPLE_URL = "swift://<SWIFT_URL>/<CONTAINER>/<FILE>"
|
||||
|
||||
def _get_endpoint(self, context):
|
||||
if self.backend_group:
|
||||
self.container = getattr(self.conf,
|
||||
self.backend_group).swift_store_container
|
||||
else:
|
||||
self.container = self.conf.glance_store.swift_store_container
|
||||
|
||||
if context is None:
|
||||
reason = _("Multi-tenant Swift storage requires a context.")
|
||||
raise exceptions.BadStoreConfiguration(store_name="swift",
|
||||
@ -1418,7 +1482,8 @@ class MultiTenantStore(BaseStore):
|
||||
'container': self.container + '_' + str(image_id),
|
||||
'obj': str(image_id),
|
||||
'auth_or_store_url': ep}
|
||||
return StoreLocation(specs, self.conf)
|
||||
return StoreLocation(specs, self.conf,
|
||||
backend_group=self.backend_group)
|
||||
|
||||
def get_connection(self, location, context=None):
|
||||
return swiftclient.Connection(
|
||||
@ -1430,8 +1495,14 @@ class MultiTenantStore(BaseStore):
|
||||
|
||||
def init_client(self, location, context=None):
|
||||
# read client parameters from config files
|
||||
ref_params = sutils.SwiftParams(self.conf).params
|
||||
ref_params = sutils.SwiftParams(self.conf,
|
||||
backend=self.backend_group).params
|
||||
if self.backend_group:
|
||||
default_ref = getattr(self.conf,
|
||||
self.backend_group).default_swift_reference
|
||||
else:
|
||||
default_ref = self.conf.glance_store.default_swift_reference
|
||||
|
||||
default_swift_reference = ref_params.get(default_ref)
|
||||
if not default_swift_reference:
|
||||
reason = _("default_swift_reference %s is "
|
||||
@ -1503,7 +1574,13 @@ class MultiTenantStore(BaseStore):
|
||||
def get_manager(self, store_location, context=None, allow_reauth=False):
|
||||
# if global toggle is turned off then do not allow re-authentication
|
||||
# with trusts
|
||||
if not self.conf.glance_store.swift_store_use_trusts:
|
||||
if self.backend_group:
|
||||
use_trusts = getattr(self.conf,
|
||||
self.backend_group).swift_store_use_trusts
|
||||
else:
|
||||
use_trusts = self.conf.glance_store.swift_store_use_trusts
|
||||
|
||||
if not use_trusts:
|
||||
allow_reauth = False
|
||||
|
||||
return connection_manager.MultiTenantConnectionManager(self,
|
||||
@ -1513,11 +1590,13 @@ class MultiTenantStore(BaseStore):
|
||||
|
||||
|
||||
class ChunkReader(object):
|
||||
def __init__(self, fd, checksum, total, verifier=None):
|
||||
def __init__(self, fd, checksum, total, verifier=None,
|
||||
backend_group=None):
|
||||
self.fd = fd
|
||||
self.checksum = checksum
|
||||
self.total = total
|
||||
self.verifier = verifier
|
||||
self.backend_group = backend_group
|
||||
self.bytes_read = 0
|
||||
self.is_zero_size = False
|
||||
self.byteone = fd.read(1)
|
||||
|
@ -111,30 +111,39 @@ else:
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def is_multiple_swift_store_accounts_enabled(conf):
|
||||
if conf.glance_store.swift_store_config_file is None:
|
||||
def is_multiple_swift_store_accounts_enabled(conf, backend=None):
|
||||
if backend:
|
||||
cfg_file = getattr(conf, backend).swift_store_config_file
|
||||
else:
|
||||
cfg_file = conf.glance_store.swift_store_config_file
|
||||
|
||||
if cfg_file is None:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class SwiftParams(object):
|
||||
def __init__(self, conf):
|
||||
def __init__(self, conf, backend=None):
|
||||
self.conf = conf
|
||||
if is_multiple_swift_store_accounts_enabled(self.conf):
|
||||
self.backend_group = backend
|
||||
if is_multiple_swift_store_accounts_enabled(
|
||||
self.conf, backend=backend):
|
||||
self.params = self._load_config()
|
||||
else:
|
||||
self.params = self._form_default_params()
|
||||
|
||||
def _form_default_params(self):
|
||||
default = {}
|
||||
|
||||
if self.backend_group:
|
||||
glance_store = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
glance_store = self.conf.glance_store
|
||||
if (
|
||||
self.conf.glance_store.swift_store_user and
|
||||
self.conf.glance_store.swift_store_key and
|
||||
self.conf.glance_store.swift_store_auth_address
|
||||
glance_store.swift_store_user and
|
||||
glance_store.swift_store_key and
|
||||
glance_store.swift_store_auth_address
|
||||
):
|
||||
|
||||
glance_store = self.conf.glance_store
|
||||
default['user'] = glance_store.swift_store_user
|
||||
default['key'] = glance_store.swift_store_key
|
||||
default['auth_address'] = glance_store.swift_store_auth_address
|
||||
@ -147,14 +156,18 @@ class SwiftParams(object):
|
||||
return {}
|
||||
|
||||
def _load_config(self):
|
||||
try:
|
||||
if self.backend_group:
|
||||
scf = getattr(self.conf,
|
||||
self.backend_group).swift_store_config_file
|
||||
else:
|
||||
scf = self.conf.glance_store.swift_store_config_file
|
||||
try:
|
||||
conf_file = self.conf.find_file(scf)
|
||||
CONFIG.read(conf_file)
|
||||
except Exception as e:
|
||||
msg = (_("swift config file "
|
||||
"%(conf)s:%(exc)s not found"),
|
||||
{'conf': self.conf.glance_store.swift_store_config_file,
|
||||
{'conf': scf,
|
||||
'exc': e})
|
||||
LOG.error(msg)
|
||||
raise exceptions.BadStoreConfiguration(store_name='swift',
|
||||
@ -177,6 +190,11 @@ class SwiftParams(object):
|
||||
try:
|
||||
reference['auth_version'] = CONFIG.get(ref, 'auth_version')
|
||||
except configparser.NoOptionError:
|
||||
if self.backend_group:
|
||||
av = getattr(
|
||||
self.conf,
|
||||
self.backend_group).swift_store_auth_version
|
||||
else:
|
||||
av = self.conf.glance_store.swift_store_auth_version
|
||||
reference['auth_version'] = av
|
||||
|
||||
|
@ -284,10 +284,12 @@ class StoreLocation(location.StoreLocation):
|
||||
vsphere://server_host/folder/file_path?dcPath=dc_path&dsName=ds_name
|
||||
"""
|
||||
|
||||
def __init__(self, store_specs, conf):
|
||||
super(StoreLocation, self).__init__(store_specs, conf)
|
||||
def __init__(self, store_specs, conf, backend_group=None):
|
||||
super(StoreLocation, self).__init__(store_specs, conf,
|
||||
backend_group=backend_group)
|
||||
self.datacenter_path = None
|
||||
self.datastore_name = None
|
||||
self.backend_group = backend_group
|
||||
|
||||
def process_specs(self):
|
||||
self.scheme = self.specs.get('scheme', STORE_SCHEME)
|
||||
@ -359,8 +361,8 @@ class Store(glance_store.Store):
|
||||
OPTIONS = _VMWARE_OPTS
|
||||
WRITE_CHUNKSIZE = units.Mi
|
||||
|
||||
def __init__(self, conf):
|
||||
super(Store, self).__init__(conf)
|
||||
def __init__(self, conf, backend=None):
|
||||
super(Store, self).__init__(conf, backend=backend)
|
||||
self.datastores = {}
|
||||
|
||||
def reset_session(self):
|
||||
@ -375,13 +377,18 @@ class Store(glance_store.Store):
|
||||
return (STORE_SCHEME,)
|
||||
|
||||
def _sanity_check(self):
|
||||
if self.conf.glance_store.vmware_api_retry_count <= 0:
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
if store_conf.vmware_api_retry_count <= 0:
|
||||
msg = _('vmware_api_retry_count should be greater than zero')
|
||||
LOG.error(msg)
|
||||
raise exceptions.BadStoreConfiguration(
|
||||
store_name='vmware_datastore', reason=msg)
|
||||
|
||||
if self.conf.glance_store.vmware_task_poll_interval <= 0:
|
||||
if store_conf.vmware_task_poll_interval <= 0:
|
||||
msg = _('vmware_task_poll_interval should be greater than zero')
|
||||
LOG.error(msg)
|
||||
raise exceptions.BadStoreConfiguration(
|
||||
@ -393,10 +400,16 @@ class Store(glance_store.Store):
|
||||
self.server_host = self._option_get('vmware_server_host')
|
||||
self.server_username = self._option_get('vmware_server_username')
|
||||
self.server_password = self._option_get('vmware_server_password')
|
||||
self.api_retry_count = self.conf.glance_store.vmware_api_retry_count
|
||||
self.tpoll_interval = self.conf.glance_store.vmware_task_poll_interval
|
||||
self.ca_file = self.conf.glance_store.vmware_ca_file
|
||||
self.api_insecure = self.conf.glance_store.vmware_insecure
|
||||
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
self.api_retry_count = store_conf.vmware_api_retry_count
|
||||
self.tpoll_interval = store_conf.vmware_task_poll_interval
|
||||
self.ca_file = store_conf.vmware_ca_file
|
||||
self.api_insecure = store_conf.vmware_insecure
|
||||
if api is None:
|
||||
msg = _("Missing dependencies: oslo_vmware")
|
||||
raise exceptions.BadStoreConfiguration(
|
||||
@ -492,7 +505,13 @@ class Store(glance_store.Store):
|
||||
def configure_add(self):
|
||||
datastores = self._option_get('vmware_datastores')
|
||||
self.datastores = self._build_datastore_weighted_map(datastores)
|
||||
self.store_image_dir = self.conf.glance_store.vmware_store_image_dir
|
||||
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
self.store_image_dir = store_conf.vmware_store_image_dir
|
||||
|
||||
def select_datastore(self, image_size):
|
||||
"""Select a datastore with free space larger than image size."""
|
||||
@ -513,7 +532,12 @@ class Store(glance_store.Store):
|
||||
raise exceptions.StorageFull()
|
||||
|
||||
def _option_get(self, param):
|
||||
result = getattr(self.conf.glance_store, param)
|
||||
if self.backend_group:
|
||||
store_conf = getattr(self.conf, self.backend_group)
|
||||
else:
|
||||
store_conf = self.conf.glance_store
|
||||
|
||||
result = getattr(store_conf, param)
|
||||
if not result:
|
||||
reason = (_("Could not find %(param)s in configuration "
|
||||
"options.") % {'param': param})
|
||||
@ -562,7 +586,8 @@ class Store(glance_store.Store):
|
||||
'image_dir': self.store_image_dir,
|
||||
'datacenter_path': ds.datacenter.path,
|
||||
'datastore_name': ds.name,
|
||||
'image_id': image_id}, self.conf)
|
||||
'image_id': image_id}, self.conf,
|
||||
backend_group=self.backend_group)
|
||||
# NOTE(arnaud): use a decorator when the config is not tied to self
|
||||
cookie = self._build_vim_cookie_header(True)
|
||||
headers = dict(headers)
|
||||
@ -609,8 +634,12 @@ class Store(glance_store.Store):
|
||||
LOG.error(msg)
|
||||
raise exceptions.BackendException(msg)
|
||||
|
||||
metadata = {}
|
||||
if self.backend_group:
|
||||
metadata['backend'] = u"%s" % self.backend_group
|
||||
|
||||
return (loc.get_uri(), image_file.size,
|
||||
image_file.checksum.hexdigest(), {})
|
||||
image_file.checksum.hexdigest(), metadata)
|
||||
|
||||
@capabilities.check
|
||||
def get(self, location, offset=0, chunk_size=None, context=None):
|
||||
@ -760,7 +789,8 @@ class Store(glance_store.Store):
|
||||
self.conf,
|
||||
uri=vsphere_url,
|
||||
image_id=image_id,
|
||||
store_specs=store_specs)
|
||||
store_specs=store_specs,
|
||||
backend=self.backend_group)
|
||||
|
||||
|
||||
def new_session(insecure=False, ca_file=None, total_retries=None):
|
||||
|
@ -108,7 +108,7 @@ def get_location_from_uri_and_backend(uri, backend, conf=CONF):
|
||||
raise exceptions.UnknownScheme(scheme=backend)
|
||||
|
||||
return Location(pieces.scheme, scheme_info['location_class'],
|
||||
conf, uri=uri)
|
||||
conf, uri=uri, backend=backend)
|
||||
|
||||
|
||||
def register_scheme_backend_map(scheme_map):
|
||||
@ -148,7 +148,7 @@ class Location(object):
|
||||
"""
|
||||
|
||||
def __init__(self, store_name, store_location_class, conf,
|
||||
uri=None, image_id=None, store_specs=None):
|
||||
uri=None, image_id=None, store_specs=None, backend=None):
|
||||
"""
|
||||
Create a new Location object.
|
||||
|
||||
@ -161,12 +161,15 @@ class Location(object):
|
||||
:param store_specs: Dictionary of information about the location
|
||||
of the image that is dependent on the backend
|
||||
store
|
||||
:param backend: Name of store backend
|
||||
"""
|
||||
self.store_name = store_name
|
||||
self.image_id = image_id
|
||||
self.store_specs = store_specs or {}
|
||||
self.conf = conf
|
||||
self.store_location = store_location_class(self.store_specs, conf)
|
||||
self.backend_group = backend
|
||||
self.store_location = store_location_class(
|
||||
self.store_specs, conf, backend_group=backend)
|
||||
if uri:
|
||||
self.store_location.parse_uri(uri)
|
||||
|
||||
@ -187,9 +190,10 @@ class StoreLocation(object):
|
||||
Base class that must be implemented by each store
|
||||
"""
|
||||
|
||||
def __init__(self, store_specs, conf):
|
||||
def __init__(self, store_specs, conf, backend_group=None):
|
||||
self.conf = conf
|
||||
self.specs = store_specs
|
||||
self.backend_group = backend_group
|
||||
if self.specs:
|
||||
self.process_specs()
|
||||
|
||||
|
@ -45,6 +45,7 @@ class TestConnectionManager(base.StoreBaseTest):
|
||||
conf=self.conf,
|
||||
auth_version='3')
|
||||
|
||||
store.backend_group = None
|
||||
store.init_client.return_value = self.client
|
||||
return store
|
||||
|
||||
|
217
glance_store/tests/unit/test_multistore_sheepdog.py
Normal file
217
glance_store/tests/unit/test_multistore_sheepdog.py
Normal file
@ -0,0 +1,217 @@
|
||||
# Copyright 2018 RedHat Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import mock
|
||||
from oslo_concurrency import processutils
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import units
|
||||
import six
|
||||
|
||||
import glance_store as store
|
||||
from glance_store._drivers import sheepdog
|
||||
from glance_store import exceptions
|
||||
from glance_store import location
|
||||
from glance_store.tests import base
|
||||
from glance_store.tests.unit import test_store_capabilities as test_cap
|
||||
|
||||
|
||||
class TestSheepdogMultiStore(base.MultiStoreBaseTest,
                             test_cap.TestStoreCapabilitiesChecking):
    """Exercise the sheepdog driver with multiple backends enabled."""

    # NOTE(flaper87): temporary until we
    # can move to a fully-local lib.
    # (Swift store's fault)
    _CONF = cfg.ConfigOpts()

    def setUp(self):
        """Establish a clean test environment."""
        super(TestSheepdogMultiStore, self).setUp()
        self.conf = self._CONF
        self.conf(args=[])
        self.conf.register_opt(cfg.DictOpt('enabled_backends'))
        self.config(enabled_backends={"sheepdog1": "sheepdog",
                                      "sheepdog2": "sheepdog"})
        store.register_store_opts(self.conf)
        self.config(default_backend='sheepdog1', group='glance_store')

        # Stub out every sheepdog CLI invocation so no real cluster
        # is ever contacted.
        executor = mock.patch.object(processutils, 'execute').start()
        executor.side_effect = lambda *cmd, **kwargs: None
        self.addCleanup(executor.stop)

        # Start from an empty scheme map so only the stores created
        # below end up registered.
        location.SCHEME_TO_CLS_BACKEND_MAP = {}

        store.create_multi_stores(self.conf)
        self.addCleanup(setattr, location, 'SCHEME_TO_CLS_BACKEND_MAP',
                        dict())
        self.addCleanup(self.conf.reset)

        self.store = sheepdog.Store(self.conf, backend='sheepdog1')
        self.store.configure()
        self.store_specs = {'image': '6bd59e6e-c410-11e5-ab67-0a73f1fda51b',
                            'addr': '127.0.0.1',
                            'port': 7000}

    @mock.patch.object(sheepdog.SheepdogImage, 'write')
    @mock.patch.object(sheepdog.SheepdogImage, 'create')
    @mock.patch.object(sheepdog.SheepdogImage, 'exist')
    def test_add_image(self, mock_exist, mock_create, mock_write):
        """An added image reports the default backend in its metadata."""
        mock_exist.return_value = False
        image_data = six.BytesIO(b'xx')

        (uri, size, checksum, loc) = self.store.add('fake_image_id',
                                                    image_data, 2)
        self.assertEqual("sheepdog1", loc["backend"])

        mock_exist.assert_called_once_with()
        mock_create.assert_called_once_with(2)
        mock_write.assert_called_once_with(b'xx', 0, 2)

    @mock.patch.object(sheepdog.SheepdogImage, 'write')
    @mock.patch.object(sheepdog.SheepdogImage, 'create')
    @mock.patch.object(sheepdog.SheepdogImage, 'exist')
    def test_add_image_to_different_backend(self, mock_exist,
                                            mock_create, mock_write):
        """A store bound to another backend reports that backend's name."""
        self.store = sheepdog.Store(self.conf, backend='sheepdog2')
        self.store.configure()

        mock_exist.return_value = False
        image_data = six.BytesIO(b'xx')

        (uri, size, checksum, loc) = self.store.add('fake_image_id',
                                                    image_data, 2)
        self.assertEqual("sheepdog2", loc["backend"])

        mock_exist.assert_called_once_with()
        mock_create.assert_called_once_with(2)
        mock_write.assert_called_once_with(b'xx', 0, 2)

    @mock.patch.object(sheepdog.SheepdogImage, 'write')
    @mock.patch.object(sheepdog.SheepdogImage, 'exist')
    def test_add_bad_size_with_image(self, mock_exist, mock_write):
        """A non-integer size is rejected before anything is written."""
        mock_exist.return_value = False
        image_data = six.BytesIO(b'xx')

        self.assertRaises(exceptions.Forbidden, self.store.add,
                          'fake_image_id', image_data, 'test')

        mock_exist.assert_called_once_with()
        self.assertEqual(0, mock_write.call_count)

    @mock.patch.object(sheepdog.SheepdogImage, 'delete')
    @mock.patch.object(sheepdog.SheepdogImage, 'write')
    @mock.patch.object(sheepdog.SheepdogImage, 'create')
    @mock.patch.object(sheepdog.SheepdogImage, 'exist')
    def test_cleanup_when_add_image_exception(self, mock_exist, mock_create,
                                              mock_write, mock_delete):
        """A failed write deletes the partially-created volume."""
        mock_exist.return_value = False
        mock_write.side_effect = exceptions.BackendException
        image_data = six.BytesIO(b'xx')

        self.assertRaises(exceptions.BackendException, self.store.add,
                          'fake_image_id', image_data, 2)

        mock_exist.assert_called_once_with()
        mock_create.assert_called_once_with(2)
        mock_write.assert_called_once_with(b'xx', 0, 2)
        mock_delete.assert_called_once_with()

    def test_add_duplicate_image(self):
        """Adding an image whose volume already exists raises Duplicate."""
        def _existing_volume(command, data, *params):
            if command == "list -r":
                return "= fake_volume 0 1000"

        with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
            cmd.side_effect = _existing_volume
            self.assertRaises(exceptions.Duplicate, self.store.add,
                              'fake_image_id', six.BytesIO(b'xx'), 2)

    def test_get(self):
        """Getting an existing image returns its size as second element."""
        def _existing_volume(command, data, *params):
            if command == "list -r":
                return "= fake_volume 0 1000"

        with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
            cmd.side_effect = _existing_volume
            loc = location.Location('test_sheepdog_store',
                                    sheepdog.StoreLocation,
                                    self.conf, store_specs=self.store_specs,
                                    backend='sheepdog1')
            ret = self.store.get(loc)
            self.assertEqual(1000, ret[1])

    def test_partial_get(self):
        """Random-access reads are not supported by the sheepdog driver."""
        loc = location.Location('test_sheepdog_store', sheepdog.StoreLocation,
                                self.conf, store_specs=self.store_specs,
                                backend='sheepdog1')
        self.assertRaises(exceptions.StoreRandomGetNotSupported,
                          self.store.get, loc, chunk_size=1)

    def test_get_size(self):
        """get_size() reports the size parsed from 'list -r' output."""
        def _existing_volume(command, data, *params):
            if command == "list -r":
                return "= fake_volume 0 1000"

        with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
            cmd.side_effect = _existing_volume
            loc = location.Location('test_sheepdog_store',
                                    sheepdog.StoreLocation,
                                    self.conf, store_specs=self.store_specs,
                                    backend='sheepdog1')
            self.assertEqual(1000, self.store.get_size(loc))

    def test_delete(self):
        """delete() looks the volume up and then removes it."""
        issued = []

        def _record_command(command, data, *params):
            issued.append(command)
            if command == "list -r":
                return "= fake_volume 0 1000"

        with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
            cmd.side_effect = _record_command
            loc = location.Location('test_sheepdog_store',
                                    sheepdog.StoreLocation,
                                    self.conf, store_specs=self.store_specs,
                                    backend='sheepdog1')
            self.store.delete(loc)
            self.assertEqual(['list -r', 'delete'], issued)

    def test_add_with_verifier(self):
        """Test that 'verifier.update' is called when verifier is provided."""
        verifier = mock.MagicMock(name='mock_verifier')
        self.store.chunk_size = units.Ki
        file_size = units.Ki  # 1K
        file_contents = b"*" * file_size
        image_file = six.BytesIO(file_contents)

        with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
            cmd.side_effect = lambda command, data, *params: None
            (uri, size, checksum, loc) = self.store.add(
                'fake_image_id', image_file, file_size, verifier=verifier)
            self.assertEqual("sheepdog1", loc["backend"])

        verifier.update.assert_called_with(file_contents)
|
645
glance_store/tests/unit/test_multistore_vmware.py
Normal file
645
glance_store/tests/unit/test_multistore_vmware.py
Normal file
@ -0,0 +1,645 @@
|
||||
# Copyright 2018 RedHat Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests the Multiple VMware Datastore backend store"""
|
||||
|
||||
import hashlib
|
||||
import uuid
|
||||
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
from oslo_utils import units
|
||||
from oslo_vmware import api
|
||||
from oslo_vmware import exceptions as vmware_exceptions
|
||||
from oslo_vmware.objects import datacenter as oslo_datacenter
|
||||
from oslo_vmware.objects import datastore as oslo_datastore
|
||||
import six
|
||||
|
||||
import glance_store as store
|
||||
import glance_store._drivers.vmware_datastore as vm_store
|
||||
from glance_store import exceptions
|
||||
from glance_store import location
|
||||
from glance_store.tests import base
|
||||
from glance_store.tests.unit import test_store_capabilities
|
||||
from glance_store.tests import utils
|
||||
|
||||
|
||||
# A stable random image id shared by the URI-based tests below.
FAKE_UUID = str(uuid.uuid4())

# Canonical payload size used by the upload tests.
FIVE_KB = 5 * units.Ki

# Baseline VMware store settings mirrored by the per-backend config
# groups registered in setUp().
VMWARE_DS = {
    'debug': True,
    'vmware_server_host': '127.0.0.1',
    'vmware_server_username': 'username',
    'vmware_server_password': 'password',
    'vmware_store_image_dir': '/openstack_glance',
    'vmware_insecure': 'True',
    'vmware_datastores': ['a:b:0'],
}
|
||||
|
||||
|
||||
def format_location(host_ip, folder_name, image_id, datastores):
    """Return the VMware Datastore store URI for the given pieces.

    :param host_ip: IP or hostname of the vCenter/ESX server
    :param folder_name: image folder path (expected to start with '/')
    :param image_id: glance image id embedded in the URI
    :param datastores: list of 'dcPath:dsName[:weight]' specs; only the
        first entry is used to build the URI
    :returns: a 'vsphere://...' URI string
    """
    # Only the datacenter path and datastore name matter here; the
    # optional trailing weight component is ignored, so tolerate specs
    # that omit it instead of requiring exactly three fields.
    datacenter_path, datastore_name = datastores[0].split(':')[:2]
    return ("vsphere://%s/folder%s/%s?dcPath=%s&dsName=%s"
            % (host_ip, folder_name, image_id,
               datacenter_path, datastore_name))
|
||||
|
||||
|
||||
def fake_datastore_obj(*args, **kwargs):
    """Build an oslo.vmware Datastore object with fake refs for tests.

    args[0] becomes the datacenter path and args[1] the datastore name.
    """
    datacenter = oslo_datacenter.Datacenter(ref='fake-ref',
                                            name='fake-name')
    datacenter.path = args[0]
    return oslo_datastore.Datastore(ref='fake-ref',
                                    datacenter=datacenter,
                                    name=args[1])
|
||||
|
||||
|
||||
class TestMultiStore(base.MultiStoreBaseTest,
                     test_store_capabilities.TestStoreCapabilitiesChecking):
    """Exercise the VMware datastore driver with multiple backends."""

    # NOTE(flaper87): temporary until we
    # can move to a fully-local lib.
    # (Swift store's fault)
    _CONF = cfg.ConfigOpts()

    @mock.patch.object(vm_store.Store, '_get_datastore')
    @mock.patch('oslo_vmware.api.VMwareAPISession')
    def setUp(self, mock_api_session, mock_get_datastore):
        """Establish a clean test environment."""
        super(TestMultiStore, self).setUp()
        self.conf = self._CONF
        self.conf(args=[])
        self.conf.register_opt(cfg.DictOpt('enabled_backends'))
        self.config(enabled_backends={"vmware1": "vmware",
                                      "vmware2": "vmware"})
        store.register_store_opts(self.conf)
        self.config(default_backend='vmware1', group='glance_store')

        # Per-backend connection settings: the two backends differ only
        # in datastore weight and image directory.
        self.config(group='vmware1',
                    vmware_server_username='admin',
                    vmware_server_password='admin',
                    vmware_server_host='127.0.0.1',
                    vmware_insecure='True',
                    vmware_datastores=['a:b:0'],
                    vmware_store_image_dir='/openstack_glance')
        self.config(group='vmware2',
                    vmware_server_username='admin',
                    vmware_server_password='admin',
                    vmware_server_host='127.0.0.1',
                    vmware_insecure='True',
                    vmware_datastores=['a:b:1'],
                    vmware_store_image_dir='/openstack_glance_1')

        # Start from an empty scheme map so only the stores created
        # below end up registered.
        location.SCHEME_TO_CLS_BACKEND_MAP = {}

        store.create_multi_stores(self.conf)
        self.addCleanup(setattr, location, 'SCHEME_TO_CLS_BACKEND_MAP',
                        dict())
        self.addCleanup(self.conf.reset)

        # Tiny chunk size so multi-chunk code paths are exercised.
        vm_store.Store.CHUNKSIZE = 2

        mock_get_datastore.side_effect = fake_datastore_obj

        self.store = vm_store.Store(self.conf, backend="vmware1")
        self.store.configure()

    def _mock_http_connection(self):
        """Patch the raw HTTP connection object used by the store."""
        return mock.patch('six.moves.http_client.HTTPConnection')
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_get(self, mock_api_session):
|
||||
"""Test a "normal" retrieval of an image in chunks."""
|
||||
expected_image_size = 31
|
||||
expected_returns = ['I am a teapot, short and stout\n']
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glance/%s"
|
||||
"?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
(image_file, image_size) = self.store.get(loc)
|
||||
self.assertEqual(expected_image_size, image_size)
|
||||
chunks = [c for c in image_file]
|
||||
self.assertEqual(expected_returns, chunks)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_get_non_existing(self, mock_api_session):
|
||||
"""
|
||||
Test that trying to retrieve an image that doesn't exist
|
||||
raises an error
|
||||
"""
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glan"
|
||||
"ce/%s?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1",
|
||||
conf=self.conf)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response(status_code=404)
|
||||
self.assertRaises(exceptions.NotFound, self.store.get, loc)
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_build_vim_cookie_header')
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch.object(vm_store._Reader, 'size')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_add(self, fake_api_session, fake_size, fake_select_datastore,
|
||||
fake_cookie):
|
||||
"""Test that we can add an image via the VMware backend."""
|
||||
fake_select_datastore.return_value = self.store.datastores[0][0]
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_size = FIVE_KB
|
||||
expected_contents = b"*" * expected_size
|
||||
hash_code = hashlib.md5(expected_contents)
|
||||
expected_checksum = hash_code.hexdigest()
|
||||
fake_size.__get__ = mock.Mock(return_value=expected_size)
|
||||
expected_cookie = 'vmware_soap_session=fake-uuid'
|
||||
fake_cookie.return_value = expected_cookie
|
||||
expected_headers = {'Content-Length': six.text_type(expected_size),
|
||||
'Cookie': expected_cookie}
|
||||
with mock.patch('hashlib.md5') as md5:
|
||||
md5.return_value = hash_code
|
||||
expected_location = format_location(
|
||||
VMWARE_DS['vmware_server_host'],
|
||||
VMWARE_DS['vmware_store_image_dir'],
|
||||
expected_image_id,
|
||||
VMWARE_DS['vmware_datastores'])
|
||||
image = six.BytesIO(expected_contents)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
location, size, checksum, metadata = self.store.add(
|
||||
expected_image_id, image, expected_size)
|
||||
_, kwargs = HttpConn.call_args
|
||||
self.assertEqual(expected_headers, kwargs['headers'])
|
||||
self.assertEqual("vmware1", metadata["backend"])
|
||||
|
||||
self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
|
||||
utils.sort_url_by_qs_keys(location))
|
||||
self.assertEqual(expected_size, size)
|
||||
self.assertEqual(expected_checksum, checksum)
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch.object(vm_store._Reader, 'size')
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_add_size_zero(self, mock_api_session, fake_size,
|
||||
fake_select_datastore):
|
||||
"""
|
||||
Test that when specifying size zero for the image to add,
|
||||
the actual size of the image is returned.
|
||||
"""
|
||||
fake_select_datastore.return_value = self.store.datastores[0][0]
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_size = FIVE_KB
|
||||
expected_contents = b"*" * expected_size
|
||||
hash_code = hashlib.md5(expected_contents)
|
||||
expected_checksum = hash_code.hexdigest()
|
||||
fake_size.__get__ = mock.Mock(return_value=expected_size)
|
||||
with mock.patch('hashlib.md5') as md5:
|
||||
md5.return_value = hash_code
|
||||
expected_location = format_location(
|
||||
VMWARE_DS['vmware_server_host'],
|
||||
VMWARE_DS['vmware_store_image_dir'],
|
||||
expected_image_id,
|
||||
VMWARE_DS['vmware_datastores'])
|
||||
image = six.BytesIO(expected_contents)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
location, size, checksum, metadata = self.store.add(
|
||||
expected_image_id, image, 0)
|
||||
self.assertEqual("vmware1", metadata["backend"])
|
||||
|
||||
self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
|
||||
utils.sort_url_by_qs_keys(location))
|
||||
self.assertEqual(expected_size, size)
|
||||
self.assertEqual(expected_checksum, checksum)
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch('glance_store._drivers.vmware_datastore._Reader')
|
||||
def test_add_with_verifier(self, fake_reader, fake_select_datastore):
|
||||
"""Test that the verifier is passed to the _Reader during add."""
|
||||
verifier = mock.MagicMock(name='mock_verifier')
|
||||
image_id = str(uuid.uuid4())
|
||||
size = FIVE_KB
|
||||
contents = b"*" * size
|
||||
image = six.BytesIO(contents)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
location, size, checksum, metadata = self.store.add(
|
||||
image_id, image, size, verifier=verifier)
|
||||
self.assertEqual("vmware1", metadata["backend"])
|
||||
|
||||
fake_reader.assert_called_with(image, verifier)
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch('glance_store._drivers.vmware_datastore._Reader')
|
||||
def test_add_with_verifier_size_zero(self, fake_reader, fake_select_ds):
|
||||
"""Test that the verifier is passed to the _ChunkReader during add."""
|
||||
verifier = mock.MagicMock(name='mock_verifier')
|
||||
image_id = str(uuid.uuid4())
|
||||
size = FIVE_KB
|
||||
contents = b"*" * size
|
||||
image = six.BytesIO(contents)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
location, size, checksum, metadata = self.store.add(
|
||||
image_id, image, 0, verifier=verifier)
|
||||
self.assertEqual("vmware1", metadata["backend"])
|
||||
|
||||
fake_reader.assert_called_with(image, verifier)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_delete(self, mock_api_session):
|
||||
"""Test we can delete an existing image in the VMware store."""
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glance/%s?"
|
||||
"dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
vm_store.Store._service_content = mock.Mock()
|
||||
self.store.delete(loc)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response(status_code=404)
|
||||
self.assertRaises(exceptions.NotFound, self.store.get, loc)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_delete_non_existing(self, mock_api_session):
|
||||
"""
|
||||
Test that trying to delete an image that doesn't exist raises an error
|
||||
"""
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glance/%s?"
|
||||
"dsName=ds1&dcPath=dc1" % FAKE_UUID,
|
||||
"vmware1", conf=self.conf)
|
||||
with mock.patch.object(self.store.session,
|
||||
'wait_for_task') as mock_task:
|
||||
mock_task.side_effect = vmware_exceptions.FileNotFoundException
|
||||
self.assertRaises(exceptions.NotFound, self.store.delete, loc)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_get_size(self, mock_api_session):
|
||||
"""
|
||||
Test we can get the size of an existing image in the VMware store
|
||||
"""
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glance/%s"
|
||||
"?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response()
|
||||
image_size = self.store.get_size(loc)
|
||||
self.assertEqual(image_size, 31)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_get_size_non_existing(self, mock_api_session):
|
||||
"""
|
||||
Test that trying to retrieve an image size that doesn't exist
|
||||
raises an error
|
||||
"""
|
||||
loc = location.get_location_from_uri_and_backend(
|
||||
"vsphere://127.0.0.1/folder/openstack_glan"
|
||||
"ce/%s?dsName=ds1&dcPath=dc1" % FAKE_UUID,
|
||||
"vmware1", conf=self.conf)
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response(status_code=404)
|
||||
self.assertRaises(exceptions.NotFound, self.store.get_size, loc)
|
||||
|
||||
def test_reader_full(self):
|
||||
content = b'XXX'
|
||||
image = six.BytesIO(content)
|
||||
expected_checksum = hashlib.md5(content).hexdigest()
|
||||
reader = vm_store._Reader(image)
|
||||
ret = reader.read()
|
||||
self.assertEqual(content, ret)
|
||||
self.assertEqual(expected_checksum, reader.checksum.hexdigest())
|
||||
self.assertEqual(len(content), reader.size)
|
||||
|
||||
def test_reader_partial(self):
|
||||
content = b'XXX'
|
||||
image = six.BytesIO(content)
|
||||
expected_checksum = hashlib.md5(b'X').hexdigest()
|
||||
reader = vm_store._Reader(image)
|
||||
ret = reader.read(1)
|
||||
self.assertEqual(b'X', ret)
|
||||
self.assertEqual(expected_checksum, reader.checksum.hexdigest())
|
||||
self.assertEqual(1, reader.size)
|
||||
|
||||
def test_reader_with_verifier(self):
|
||||
content = b'XXX'
|
||||
image = six.BytesIO(content)
|
||||
verifier = mock.MagicMock(name='mock_verifier')
|
||||
reader = vm_store._Reader(image, verifier)
|
||||
reader.read()
|
||||
verifier.update.assert_called_with(content)
|
||||
|
||||
def test_sanity_check_multiple_datastores(self):
|
||||
self.config(group='vmware1', vmware_api_retry_count=1)
|
||||
self.config(group='vmware1', vmware_task_poll_interval=1)
|
||||
self.config(group='vmware1', vmware_datastores=['a:b:0', 'a:d:0'])
|
||||
try:
|
||||
self.store._sanity_check()
|
||||
except exceptions.BadStoreConfiguration:
|
||||
self.fail()
|
||||
|
||||
def test_parse_datastore_info_and_weight_less_opts(self):
|
||||
datastore = 'a'
|
||||
self.assertRaises(exceptions.BadStoreConfiguration,
|
||||
self.store._parse_datastore_info_and_weight,
|
||||
datastore)
|
||||
|
||||
def test_parse_datastore_info_and_weight_invalid_weight(self):
|
||||
datastore = 'a:b:c'
|
||||
self.assertRaises(exceptions.BadStoreConfiguration,
|
||||
self.store._parse_datastore_info_and_weight,
|
||||
datastore)
|
||||
|
||||
def test_parse_datastore_info_and_weight_empty_opts(self):
|
||||
datastore = 'a: :0'
|
||||
self.assertRaises(exceptions.BadStoreConfiguration,
|
||||
self.store._parse_datastore_info_and_weight,
|
||||
datastore)
|
||||
datastore = ':b:0'
|
||||
self.assertRaises(exceptions.BadStoreConfiguration,
|
||||
self.store._parse_datastore_info_and_weight,
|
||||
datastore)
|
||||
|
||||
def test_parse_datastore_info_and_weight(self):
|
||||
datastore = 'a:b:100'
|
||||
parts = self.store._parse_datastore_info_and_weight(datastore)
|
||||
self.assertEqual('a', parts[0])
|
||||
self.assertEqual('b', parts[1])
|
||||
self.assertEqual('100', parts[2])
|
||||
|
||||
def test_parse_datastore_info_and_weight_default_weight(self):
|
||||
datastore = 'a:b'
|
||||
parts = self.store._parse_datastore_info_and_weight(datastore)
|
||||
self.assertEqual('a', parts[0])
|
||||
self.assertEqual('b', parts[1])
|
||||
self.assertEqual(0, parts[2])
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_unexpected_status(self, mock_api_session, mock_select_datastore):
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_size = FIVE_KB
|
||||
expected_contents = b"*" * expected_size
|
||||
image = six.BytesIO(expected_contents)
|
||||
self.session = mock.Mock()
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response(status_code=401)
|
||||
self.assertRaises(exceptions.BackendException,
|
||||
self.store.add,
|
||||
expected_image_id, image, expected_size)
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_unexpected_status_no_response_body(self, mock_api_session,
|
||||
mock_select_datastore):
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_size = FIVE_KB
|
||||
expected_contents = b"*" * expected_size
|
||||
image = six.BytesIO(expected_contents)
|
||||
self.session = mock.Mock()
|
||||
with self._mock_http_connection() as HttpConn:
|
||||
HttpConn.return_value = utils.fake_response(status_code=500,
|
||||
no_response_body=True)
|
||||
self.assertRaises(exceptions.BackendException,
|
||||
self.store.add,
|
||||
expected_image_id, image, expected_size)
|
||||
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_reset_session(self, mock_api_session):
|
||||
self.store.reset_session()
|
||||
self.assertTrue(mock_api_session.called)
|
||||
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_vim_cookie_header_active(self, mock_api_session):
|
||||
self.store.session.is_current_session_active = mock.Mock()
|
||||
self.store.session.is_current_session_active.return_value = True
|
||||
self.store._build_vim_cookie_header(True)
|
||||
self.assertFalse(mock_api_session.called)
|
||||
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_vim_cookie_header_expired(self, mock_api_session):
|
||||
self.store.session.is_current_session_active = mock.Mock()
|
||||
self.store.session.is_current_session_active.return_value = False
|
||||
self.store._build_vim_cookie_header(True)
|
||||
self.assertTrue(mock_api_session.called)
|
||||
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_vim_cookie_header_expired_noverify(self, mock_api_session):
|
||||
self.store.session.is_current_session_active = mock.Mock()
|
||||
self.store.session.is_current_session_active.return_value = False
|
||||
self.store._build_vim_cookie_header()
|
||||
self.assertFalse(mock_api_session.called)
|
||||
|
||||
@mock.patch.object(vm_store.Store, 'select_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_add_ioerror(self, mock_api_session, mock_select_datastore):
|
||||
mock_select_datastore.return_value = self.store.datastores[0][0]
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_size = FIVE_KB
|
||||
expected_contents = b"*" * expected_size
|
||||
image = six.BytesIO(expected_contents)
|
||||
self.session = mock.Mock()
|
||||
with mock.patch('requests.Session.request') as HttpConn:
|
||||
HttpConn.request.side_effect = IOError
|
||||
self.assertRaises(exceptions.BackendException,
|
||||
self.store.add,
|
||||
expected_image_id, image, expected_size)
|
||||
|
||||
def test_qs_sort_with_literal_question_mark(self):
|
||||
url = 'scheme://example.com/path?key2=val2&key1=val1?sort=true'
|
||||
exp_url = 'scheme://example.com/path?key1=val1%3Fsort%3Dtrue&key2=val2'
|
||||
self.assertEqual(exp_url,
|
||||
utils.sort_url_by_qs_keys(url))
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_datastore_weighted_map(self, mock_api_session, mock_ds_obj):
|
||||
datastores = ['a:b:100', 'c:d:100', 'e:f:200']
|
||||
mock_ds_obj.side_effect = fake_datastore_obj
|
||||
ret = self.store._build_datastore_weighted_map(datastores)
|
||||
ds = ret[200]
|
||||
self.assertEqual('e', ds[0].datacenter.path)
|
||||
self.assertEqual('f', ds[0].name)
|
||||
ds = ret[100]
|
||||
self.assertEqual(2, len(ds))
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_datastore_weighted_map_equal_weight(self, mock_api_session,
|
||||
mock_ds_obj):
|
||||
datastores = ['a:b:200', 'a:b:200']
|
||||
mock_ds_obj.side_effect = fake_datastore_obj
|
||||
ret = self.store._build_datastore_weighted_map(datastores)
|
||||
ds = ret[200]
|
||||
self.assertEqual(2, len(ds))
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(api, 'VMwareAPISession')
|
||||
def test_build_datastore_weighted_map_empty_list(self, mock_api_session,
|
||||
mock_ds_ref):
|
||||
datastores = []
|
||||
ret = self.store._build_datastore_weighted_map(datastores)
|
||||
self.assertEqual({}, ret)
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(vm_store.Store, '_get_freespace')
|
||||
def test_select_datastore_insufficient_freespace(self, mock_get_freespace,
|
||||
mock_ds_ref):
|
||||
datastores = ['a:b:100', 'c:d:100', 'e:f:200']
|
||||
image_size = 10
|
||||
self.store.datastores = (
|
||||
self.store._build_datastore_weighted_map(datastores))
|
||||
freespaces = [5, 5, 5]
|
||||
|
||||
def fake_get_fp(*args, **kwargs):
|
||||
return freespaces.pop(0)
|
||||
mock_get_freespace.side_effect = fake_get_fp
|
||||
self.assertRaises(exceptions.StorageFull,
|
||||
self.store.select_datastore, image_size)
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(vm_store.Store, '_get_freespace')
|
||||
def test_select_datastore_insufficient_fs_one_ds(self, mock_get_freespace,
|
||||
mock_ds_ref):
|
||||
# Tests if fs is updated with just one datastore.
|
||||
datastores = ['a:b:100']
|
||||
image_size = 10
|
||||
self.store.datastores = (
|
||||
self.store._build_datastore_weighted_map(datastores))
|
||||
freespaces = [5]
|
||||
|
||||
def fake_get_fp(*args, **kwargs):
|
||||
return freespaces.pop(0)
|
||||
mock_get_freespace.side_effect = fake_get_fp
|
||||
self.assertRaises(exceptions.StorageFull,
|
||||
self.store.select_datastore, image_size)
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(vm_store.Store, '_get_freespace')
|
||||
def test_select_datastore_equal_freespace(self, mock_get_freespace,
|
||||
mock_ds_obj):
|
||||
datastores = ['a:b:100', 'c:d:100', 'e:f:200']
|
||||
image_size = 10
|
||||
mock_ds_obj.side_effect = fake_datastore_obj
|
||||
self.store.datastores = (
|
||||
self.store._build_datastore_weighted_map(datastores))
|
||||
freespaces = [11, 11, 11]
|
||||
|
||||
def fake_get_fp(*args, **kwargs):
|
||||
return freespaces.pop(0)
|
||||
mock_get_freespace.side_effect = fake_get_fp
|
||||
|
||||
ds = self.store.select_datastore(image_size)
|
||||
self.assertEqual('e', ds.datacenter.path)
|
||||
self.assertEqual('f', ds.name)
|
||||
|
||||
@mock.patch.object(vm_store.Store, '_get_datastore')
|
||||
@mock.patch.object(vm_store.Store, '_get_freespace')
|
||||
def test_select_datastore_contention(self, mock_get_freespace,
|
||||
mock_ds_obj):
|
||||
datastores = ['a:b:100', 'c:d:100', 'e:f:200']
|
||||
image_size = 10
|
||||
mock_ds_obj.side_effect = fake_datastore_obj
|
||||
self.store.datastores = (
|
||||
self.store._build_datastore_weighted_map(datastores))
|
||||
freespaces = [5, 11, 12]
|
||||
|
||||
def fake_get_fp(*args, **kwargs):
|
||||
return freespaces.pop(0)
|
||||
mock_get_freespace.side_effect = fake_get_fp
|
||||
ds = self.store.select_datastore(image_size)
|
||||
self.assertEqual('c', ds.datacenter.path)
|
||||
self.assertEqual('d', ds.name)
|
||||
|
||||
def test_select_datastore_empty_list(self):
|
||||
datastores = []
|
||||
self.store.datastores = (
|
||||
self.store._build_datastore_weighted_map(datastores))
|
||||
self.assertRaises(exceptions.StorageFull,
|
||||
self.store.select_datastore, 10)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
|
||||
def test_get_datacenter_ref(self, mock_api_session):
|
||||
datacenter_path = 'Datacenter1'
|
||||
self.store._get_datacenter(datacenter_path)
|
||||
self.store.session.invoke_api.assert_called_with(
|
||||
self.store.session.vim,
|
||||
'FindByInventoryPath',
|
||||
self.store.session.vim.service_content.searchIndex,
|
||||
inventoryPath=datacenter_path)
|
||||
|
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
def test_http_get_redirect(self, mock_api_session):
    """get() follows a 301 then a 302 and returns the final body.

    Two redirect hops precede the 200 OK; the store is expected to
    chase both and hand back the payload from the last response.
    """
    hop1 = {"location": "https://example.com?dsName=ds1&dcPath=dc1"}
    hop2 = {"location": "https://example.com?dsName=ds2&dcPath=dc2"}
    # Responses in delivery order; a list side_effect returns them
    # one per call.
    replies = [
        utils.fake_response(status_code=301, headers=hop2),
        utils.fake_response(status_code=302, headers=hop1),
        utils.fake_response(),
    ]

    loc = location.get_location_from_uri_and_backend(
        "vsphere://127.0.0.1/folder/openstack_glance/%s"
        "?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)
    with mock.patch('requests.Session.request') as HttpConn:
        HttpConn.side_effect = replies
        (image_file, image_size) = self.store.get(loc)
        self.assertEqual(31, image_size)
        self.assertEqual(['I am a teapot, short and stout\n'],
                         list(image_file))
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
def test_http_get_max_redirects(self, mock_api_session):
    """get() aborts once the redirect limit is exceeded."""
    hop = {"location": "https://example.com?dsName=ds1&dcPath=dc1"}
    # One more redirect than the store tolerates; each call to the
    # mocked request returns another 302.
    replies = [utils.fake_response(status_code=302, headers=hop)
               for _ in range(vm_store.MAX_REDIRECTS + 1)]

    loc = location.get_location_from_uri_and_backend(
        "vsphere://127.0.0.1/folder/openstack_glance/%s"
        "?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)
    with mock.patch('requests.Session.request') as HttpConn:
        HttpConn.side_effect = replies
        self.assertRaises(exceptions.MaxRedirectsExceeded,
                          self.store.get, loc)
||||
@mock.patch('oslo_vmware.api.VMwareAPISession')
def test_http_get_redirect_invalid(self, mock_api_session):
    """An unsupported redirect status (307) raises BadStoreUri."""
    hop = {"location": "https://example.com?dsName=ds1&dcPath=dc1"}
    loc = location.get_location_from_uri_and_backend(
        "vsphere://127.0.0.1/folder/openstack_glance/%s"
        "?dsName=ds1&dcPath=dc1" % FAKE_UUID, "vmware1", conf=self.conf)

    with mock.patch('requests.Session.request') as HttpConn:
        HttpConn.return_value = utils.fake_response(
            status_code=307, headers=hop)
        self.assertRaises(exceptions.BadStoreUri, self.store.get, loc)
|
2206
glance_store/tests/unit/test_swift_store_multibackend.py
Normal file
2206
glance_store/tests/unit/test_swift_store_multibackend.py
Normal file
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue
Block a user