Allow specifying a default store in the artifact type config section
This patch adds the ability to specify a default store in a type's config
section of glare.conf. Example:

    [artifact_type:images]
    default_store = swift

If the value isn't set, the store defaults to "default_store" from the
[glance_store] section.

Previously this was done as part of the enabled_artifact_types parameter,
where the default store could be appended to a type name after a colon.
Example:

    [DEFAULT]
    enabled_artifact_types = images:swift

That approach was considered wrong and has been removed from Glare. From
now on the default store must be set in the artifact type section.

Change-Id: I24f34812f527709c311ea3a404cede8c6d99d82b
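The snippet below is a minimal illustrative sketch (not part of the patch) of how the effective store for a type is resolved under the new scheme: the per-type value from the [artifact_type:<type_name>] group is used first, with a fallback to the [glance_store] default. The helper name resolve_store is hypothetical, and the sketch assumes the per-type option group has already been registered with oslo.config, as the registry change in the diff below does.

    from oslo_config import cfg

    CONF = cfg.CONF


    def resolve_store(type_name):
        # Hypothetical helper; mirrors the lookup added by this patch.
        # Per-type value from the [artifact_type:<type_name>] section of glare.conf.
        default_store = getattr(CONF, 'artifact_type:' + type_name).default_store
        # Fall back to the global default from the [glance_store] section.
        if default_store is None:
            default_store = CONF.glance_store.default_store
        return default_store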
parent 04a7178fb4
commit f564fc9f4d
@@ -67,10 +67,6 @@ def save_blob_to_store(blob_id, blob, context, max_size,
     :param verifier: signature verified
     :return: tuple of values: (location_uri, size, checksums)
     """
-    if store_type not in set(CONF.glance_store.stores + ['database']):
-        LOG.warning("Incorrect backend configuration - scheme '%s' is not"
-                    " supported. Fallback to default store.", store_type)
-        store_type = None
     data = utils.LimitingReader(utils.CooperativeReader(blob), max_size)

     LOG.debug('Start uploading blob %s.', blob_id)

@@ -341,7 +341,8 @@ class Engine(object):
         af.validate_delete(context, af)
         blobs = af.delete(context, af)

-        delayed_delete = getattr(CONF, type_name).delayed_delete
+        delayed_delete = getattr(
+            CONF, 'artifact_type:' + type_name).delayed_delete
         # use global parameter if delayed delete isn't set per artifact type
         if delayed_delete is None:
             delayed_delete = CONF.delayed_delete

@@ -488,8 +489,12 @@ class Engine(object):
             max_allowed_size = min(max_allowed_size,
                                    max_folder_size_allowed)

-        default_store = af.get_default_store(
-            context, af, field_name, blob_key)
+        default_store = getattr(
+            CONF, 'artifact_type:' + type_name).default_store
+        # use global parameter if default store isn't set per artifact type
+        if default_store is None:
+            default_store = CONF.glance_store.default_store

         location_uri, size, checksums = store_api.save_blob_to_store(
             blob_id, fd, context, max_allowed_size,
             store_type=default_store)
@@ -133,12 +133,36 @@ class BaseArtifact(base.VersionedObject):
                            "by some other tool in the background. "
                            "Redefines global parameter of the same name "
                            "from [DEFAULT] section.")),
+        cfg.StrOpt('default_store',
+                   choices=('file', 'filesystem', 'http', 'https', 'swift',
+                            'swift+http', 'swift+https', 'swift+config', 'rbd',
+                            'sheepdog', 'cinder', 'vsphere', 'database'),
+                   help=_("""
+The default scheme to use for storing artifacts of this
+type.
+Provide a string value representing the default scheme to
+use for storing artifact data. If not set, Glare uses
+default_store parameter from [glance_store] section.
+NOTE: The value given for this configuration option must
+be a valid scheme for a store registered with the ``stores``
+configuration option.
+Possible values:
+    * file
+    * filesystem
+    * http
+    * https
+    * swift
+    * swift+http
+    * swift+https
+    * swift+config
+    * rbd
+    * sheepdog
+    * cinder
+    * vsphere
+    * database
+"""))
     ]

-    def __new__(cls, *args, **kwargs):
-        CONF.register_opts(cls.artifact_type_opts, group=cls.get_type_name())
-        return base.VersionedObject.__new__(cls)
-
     @classmethod
     def list_artifact_type_opts(cls):
         return cls.artifact_type_opts
@@ -459,15 +483,6 @@ class BaseArtifact(base.VersionedObject):
         """Validation hook for deletion."""
         pass

-    @classmethod
-    def get_default_store(cls, context=None, af=None,
-                          field_name=None, blob_key=None):
-        """Return a default store type for artifact type."""
-        for t in CONF.enabled_artifact_types:
-            type_name, __, store_name = t.partition(':')
-            if type_name == cls.get_type_name():
-                return store_name
-
     def to_notification(self):
         """Return notification body that can be send to listeners.

@@ -87,7 +87,12 @@ def upload_content_file(context, af, data, blob_dict, key_name,

    # try to perform blob uploading to storage backend
    try:
-        default_store = af.get_default_store(context, af, blob_dict, key_name)
+        default_store = getattr(
+            CONF, 'artifact_type:' + af.get_type_name()).default_store
+        # use global parameter if default store isn't set per artifact type
+        if default_store is None:
+            default_store = CONF.glance_store.default_store
+
         location_uri, size, checksums = store_api.save_blob_to_store(
             blob_id, data, context, af.get_max_blob_size(blob_dict),
             default_store)
@@ -103,10 +103,13 @@ class ArtifactRegistry(vo_base.VersionedObjectRegistry):
         supported_types = []
         for module in modules:
             supported_types.extend(get_subclasses(module, base.BaseArtifact))
-        types = [t.partition(':')[0] for t in CONF.enabled_artifact_types]
-        for type_name in set(types + ['all']):
+        for type_name in set(CONF.enabled_artifact_types + ['all']):
             for af_type in supported_types:
                 if type_name == af_type.get_type_name():
+                    CONF.register_opts(
+                        af_type.artifact_type_opts,
+                        group='artifact_type:' + type_name)
                     cls.register(af_type)
                     break
             else:

@@ -56,7 +56,8 @@ _artifacts_opts = [
 registry.ArtifactRegistry.register_all_artifacts()
 for af_type in registry.ArtifactRegistry.obj_classes().values():
     _artifacts_opts.append(
-        (af_type[0].get_type_name(), af_type[0].list_artifact_type_opts()))
+        ('artifact_type:' + af_type[0].get_type_name(),
+         af_type[0].list_artifact_type_opts()))


 def list_artifacts_opts():

@@ -283,6 +283,7 @@ class GlareServer(Server):
         self.send_identity_headers = False
         self.enabled_artifact_types = ''
         self.custom_artifact_types_modules = ''
+        self.artifact_type_section = ''

         self.conf_base = """[DEFAULT]
 debug = %(debug)s

@@ -307,6 +308,7 @@ filesystem_store_datadir=%(blob_dir)s
 default_store = %(default_store)s
 [database]
 connection = %(sql_connection)s
+%(artifact_type_section)s
 """
         self.paste_conf_base = """[pipeline:glare-api]
 pipeline = faultwrapper versionnegotiation trustedauth glarev1api
@@ -20,7 +20,22 @@ from glare.tests.functional import base


 class TestMultiStore(base.TestArtifact):
-    enabled_types = (u'sample_artifact:database',)
+    def setUp(self):
+        base.functional.FunctionalTest.setUp(self)
+
+        self.set_user('user1')
+        self.glare_server.deployment_flavor = 'noauth'
+
+        self.glare_server.enabled_artifact_types = 'sample_artifact'
+        self.glare_server.custom_artifact_types_modules = (
+            'glare.tests.sample_artifact')
+        self.glare_server.artifact_type_section = """
+[artifact_type:sample_artifact]
+delayed_delete = False
+default_store = database
+"""
+        self.start_servers(**self.__dict__.copy())

     def test_blob_dicts(self):
         # Getting empty artifact list

@@ -89,7 +89,7 @@ class HookChecker(base.BaseArtifact):

     @classmethod
     def validate_upload(cls, context, af, field_name, fd):
-        if CONF.hooks_artifact.in_memory_processing:
+        if getattr(CONF, 'artifact_type:hooks_artifact').in_memory_processing:
             return file_utils.unpack_zip_archive_in_memory(
                 context, af, 'content', fd), None
         else:
@@ -21,12 +21,12 @@ from glare.db import artifact_api
 from glare.tests.unit import base


-class TestArtifactUpdate(base.BaseTestArtifactAPI):
+class TestArtifactDelete(base.BaseTestArtifactAPI):

     """Test Glare artifact deletion."""

     def setUp(self):
-        super(TestArtifactUpdate, self).setUp()
+        super(TestArtifactDelete, self).setUp()
         values = {'name': 'ttt', 'version': '1.0'}
         self.artifact = self.controller.create(
             self.req, 'sample_artifact', values)

@@ -178,7 +178,8 @@ class TestArtifactUpdate(base.BaseTestArtifactAPI):
     def test_delayed_delete_per_artifact_type(self, mocked_delete):
         # Enable delayed delete for sample_artifact type
         # Global parameter is disabled
-        self.config(delayed_delete=True, group='sample_artifact')
+        self.config(delayed_delete=True,
+                    group='artifact_type:sample_artifact')
         # Delete artifact and check that 'delete_blob' was not called
         self.controller.delete(self.req, 'sample_artifact',
                                self.artifact['id'])

@@ -190,7 +191,8 @@ class TestArtifactUpdate(base.BaseTestArtifactAPI):
         self.assertEqual('deleted', self.artifact['status'])
         self.assertEqual('active', self.artifact['blob']['status'])
         # Disable delayed delete
-        self.config(delayed_delete=False, group='sample_artifact')
+        self.config(delayed_delete=False,
+                    group='artifact_type:sample_artifact')
         # Delete artifact and check that 'delete_blob' was called this time
         self.controller.delete(self.req, 'sample_artifact',
                                self.artifact['id'])

@@ -85,7 +85,8 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
             BytesIO(b'aaa'), 'application/octet-stream')

     def test_storage_error(self):
-        self.config(enabled_artifact_types=['sample_artifact'])
+        self.config(default_store='filesystem',
+                    group='artifact_type:sample_artifact')
         with mock.patch('glance_store.backend.add_to_backend',
                         side_effect=store_exc.GlanceStoreException):
             self.assertRaises(

@@ -179,7 +180,8 @@ class TestArtifactUpload(base.BaseTestArtifactAPI):
             'dict_of_blobs/blb', BytesIO(b'aaa'), 'application/octet-stream')

     def test_blob_dict_storage_error(self):
-        self.config(enabled_artifact_types=['sample_artifact'])
+        self.config(default_store='filesystem',
+                    group='artifact_type:sample_artifact')
         with mock.patch('glance_store.backend.add_to_backend',
                         side_effect=store_exc.GlanceStoreException):
             self.assertRaises(

@@ -82,7 +82,7 @@ class BaseTestCase(testtools.TestCase):
                 'glare.tests.hooks_artifact'
             ],
             enabled_artifact_types=[
-                'hooks_artifact', 'sample_artifact:database', 'images',
+                'hooks_artifact', 'sample_artifact', 'images',
                 'heat_templates', 'heat_environments', 'murano_packages',
                 'tosca_templates']
         )

@@ -165,3 +165,5 @@ class BaseTestArtifactAPI(BaseTestCase):
         super(BaseTestArtifactAPI, self).setUp()
         self.controller = resource.ArtifactsController()
         self.req = self.get_fake_request(user=self.users['user1'])
+        self.config(default_store='database',
+                    group='artifact_type:sample_artifact')

@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+from glare import engine
 from glare.objects.meta import registry
 from glare.tests.unit import base

@@ -21,18 +22,23 @@ class TestMultistore(base.BaseTestCase):

     def test_multistore(self):
         types = {'images': 'swift',
-                 'heat_templates': 'rbd', 'heat_environments': '',
+                 'heat_templates': 'rbd', 'heat_environments': 'file',
                  'tosca_templates': 'sheepdog',
-                 'murano_packages': 'vmware_store',
+                 'murano_packages': 'vsphere',
                  'sample_artifact': 'database',
                  'hooks_artifact': 'database'}

-        self.config(
-            enabled_artifact_types=[":".join(_) for _ in types.items()])
-        registry.ArtifactRegistry.register_all_artifacts()
+        # create engine and register new artifact types
+        engine.Engine()
+
+        for type_name, store in types.items():
+            self.config(default_store=store,
+                        group='artifact_type:' + type_name)
+
         for t in registry.ArtifactRegistry.obj_classes().values():
             name = t[0].get_type_name()
             if name == 'all':
                 continue
-            self.assertEqual(t[0].get_default_store(), types[name])
+            self.assertEqual(
+                getattr(base.CONF, 'artifact_type:' + name).default_store,
+                types[name])

@@ -48,7 +48,7 @@ class TestArtifactHooks(base.BaseTestArtifactAPI):
     def test_upload_hook_inmemory(self):
         # enable in-memory processing
         self.config(in_memory_processing=True,
-                    group='hooks_artifact')
+                    group='artifact_type:hooks_artifact')

         # First check uploading with smaller limit fails
         with mock.patch('glare.objects.meta.file_utils.'