Add I18n-related unit tests (Part 3)
This CR is the first of several dependent CRs that break up the overall tests added via this abandoned CR: https://review.openstack.org/#/c/139894

This CR adds the new database/repository unit tests to the 'repository' package.

Change-Id: Iccb24665801e9c207fa90acbaa4ce6e24088cf90
This commit is contained in:
parent 76b228645d
commit 010b397e9e
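As context for the diff that follows, here is a minimal, illustrative sketch (not part of this commit) of the test pattern the new repository-package files use: each test case extends the RepositoryTestCase fixture from barbican/tests/database_utils.py, talks to an in-memory SQLite database through a repository class, and asserts on returned entities or on exception.NotFound. The class and test names in the sketch are hypothetical; the repository and model APIs (ProjectRepo, models.Project, find_by_external_project_id) are the ones exercised by the tests added below.

# Illustrative sketch only, modeled on the new tests in this commit.
from barbican.common import exception
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils


class WhenSketchingProjectRepository(database_utils.RepositoryTestCase):
    """Hypothetical example of a repository-level unit test."""

    def setUp(self):
        super(WhenSketchingProjectRepository, self).setUp()
        self.repo = repositories.ProjectRepo()

    def test_should_create_and_retrieve_project(self):
        session = self.repo.get_session()

        # Persist a project against the in-memory database configured by
        # the RepositoryTestCase fixture.
        project = models.Project()
        project.keystone_id = 'my keystone id'
        project.status = models.States.ACTIVE
        self.repo.create_from(project, session=session)

        # The entity receives an id on create and can be read back.
        self.assertIsNotNone(project.id)
        self.assertEqual(project.id, self.repo.get(project.id).id)

    def test_should_raise_not_found_for_unknown_project(self):
        session = self.repo.get_session()

        # Lookups that match nothing raise NotFound when exception
        # suppression is disabled.
        self.assertRaises(
            exception.NotFound,
            self.repo.find_by_external_project_id,
            'unknown keystone id',
            session=session,
            suppress_exception=False)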
@@ -1,7 +1,9 @@
[run]
branch = True
omit = etc/*,setup.py,*egg*,.tox/*,barbican/tests/*,*barbican/openstack/*,
    functionaltests/*,contrib/*, barbican/model/migration/alembic_migrations/versions/*
    functionaltests/*,contrib/*,
    barbican/model/migration/alembic_migrations/versions/*,
    barbican/plugin/dogtag.py, barbican/plugin/symantec.py

[report]
ignore_errors = True
@@ -69,10 +69,9 @@ def enforce_rbac(action_name='default'):
            # context placed here by context.py
            # middleware
            ctx = _get_barbican_context(pecan.request)
            external_project_id = None
            if ctx:
                external_project_id = ctx.project
            else:
                external_project_id = None

            _do_enforce_rbac(pecan.request, action_name, ctx)
            # insert external_project_id as the first arg to the guarded method

@@ -115,9 +115,9 @@ class ContextMiddleware(BaseContextMiddleware):
                catalog_header = req.headers.get('X-Service-Catalog')
                service_catalog = json.loads(catalog_header)
            except ValueError:
                LOG.exception(u._LE('Problem processing X-Service-Catalog'))
                raise webob.exc.HTTPInternalServerError(
                    u._('Invalid service catalog json.'))
                msg = u._('Problem processing X-Service-Catalog')
                LOG.exception(msg)
                raise webob.exc.HTTPInternalServerError(msg)

        kwargs = {
            'user': req.headers.get('X-User-Id'),

@@ -6,9 +6,9 @@
#, fuzzy
msgid ""
msgstr ""
"Project-Id-Version: barbican 2015.1.dev86.g7b38b5d\n"
"Project-Id-Version: barbican 2015.1.dev103.gb90a40b\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
"POT-Creation-Date: 2014-12-07 19:03-0600\n"
"POT-Creation-Date: 2014-12-13 20:17-0600\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@@ -117,15 +117,15 @@ msgstr ""
msgid "Container deletion"
msgstr ""

#: barbican/api/controllers/containers.py:113
#: barbican/api/controllers/containers.py:114
msgid "Containers(s) retrieval"
msgstr ""

#: barbican/api/controllers/containers.py:151
#: barbican/api/controllers/containers.py:152
msgid "Container creation"
msgstr ""

#: barbican/api/controllers/containers.py:174
#: barbican/api/controllers/containers.py:175
msgid "Secret provided for '{secret_name}' doesn't exist."
msgstr ""

@@ -260,8 +260,8 @@ msgid ""
"This only applies when using ContextMiddleware."
msgstr ""

#: barbican/api/middleware/context.py:120
msgid "Invalid service catalog json."
#: barbican/api/middleware/context.py:118
msgid "Problem processing X-Service-Catalog"
msgstr ""

#: barbican/api/middleware/context.py:143
@@ -646,61 +646,67 @@ msgstr ""
msgid "transport_key must be provided"
msgstr ""

#: barbican/model/repositories.py:398
#: barbican/model/repositories.py:168
msgid "No SQL connection configured"
msgstr ""

#: barbican/model/repositories.py:182
msgid ""
"Error configuring registry database with supplied sql_connection. Got "
"error: {error}"
msgstr ""

#: barbican/model/repositories.py:410
msgid "Must supply non-None {entity_name}."
msgstr ""

#: barbican/model/repositories.py:404
msgid "Must supply {entity_name} with id=None(i.e. new entity)."
#: barbican/model/repositories.py:416
msgid "Must supply {entity_name} with id=None (i.e. new entity)."
msgstr ""

#: barbican/model/repositories.py:505
#: barbican/model/repositories.py:492
msgid "{entity_name} status is required."
msgstr ""

#: barbican/model/repositories.py:510
#: barbican/model/repositories.py:497
msgid "Invalid status '{status}' for {entity_name}."
msgstr ""

#: barbican/model/repositories.py:571
#: barbican/model/repositories.py:517
msgid "{entity_name} is missing query build method for get project entities."
msgstr ""

#: barbican/model/repositories.py:623
#: barbican/model/repositories.py:569
#, python-format
msgid "Error deleting project entities for project_id=%s"
msgstr ""

#: barbican/model/repositories.py:657
#: barbican/model/repositories.py:600
msgid "No {entity_name} found with keystone-ID {id}"
msgstr ""

#: barbican/model/repositories.py:864
#: barbican/model/repositories.py:794
msgid "Tried to register crypto plugin with null or empty name."
msgstr ""

#: barbican/model/repositories.py:1231
#: barbican/model/repositories.py:1131
msgid "Could not find {entity_name}"
msgstr ""

#: barbican/model/repositories.py:1237
msgid "Found more than one {entity_name}"
msgstr ""

#: barbican/model/repositories.py:1380
#: barbican/model/repositories.py:1265
msgid "No {entity} found with ID {id}"
msgstr ""

#: barbican/model/repositories.py:1386
#: barbican/model/repositories.py:1271
msgid "Entity ID {entity_id} not found"
msgstr ""

#: barbican/model/repositories.py:1392
msgid "No {entity_name}'s found"
#: barbican/model/repositories.py:1277
msgid "No entities of type {entity_name} found"
msgstr ""

#: barbican/model/repositories.py:1398
msgid "Entity ID {entity_id} already exists!"
#: barbican/model/repositories.py:1283
msgid "Entity '{entity_name}' already exists"
msgstr ""

#: barbican/openstack/common/eventlet_backdoor.py:142
@@ -1179,7 +1185,7 @@ msgid "Extension namespace to search for eventing plugins."
msgstr ""

#: barbican/plugin/interface/certificate_manager.py:91
msgid "Certificate plugin \"{name}\" not found or configured."
msgid "Certificate plugin \"{name}\" not found."
msgstr ""

#: barbican/plugin/interface/certificate_manager.py:95
@@ -1187,11 +1193,11 @@ msgid "Certificate plugin not found or configured."
msgstr ""

#: barbican/plugin/interface/certificate_manager.py:103
msgid "Certificate event plugin \"{name}\" not found or configured."
msgid "Certificate event plugin \"{name}\" not found."
msgstr ""

#: barbican/plugin/interface/certificate_manager.py:107
msgid "Certificate event plugin not found or configured."
msgid "Certificate event plugin not found."
msgstr ""

#: barbican/plugin/interface/certificate_manager.py:115
@@ -1227,7 +1233,7 @@ msgid "Secret store plugin not found for requested operation."
msgstr ""

#: barbican/plugin/interface/secret_store.py:67
msgid "Secret Content Type of '{content_type}' not supported"
msgid "A Content-Type of '{content_type}' for secrets is not supported"
msgstr ""

#: barbican/plugin/interface/secret_store.py:78
@@ -1335,8 +1341,11 @@ msgstr ""
msgid "Update Order"
msgstr ""

#: barbican/tests/tasks/test_resources.py:142
#: barbican/tests/tasks/test_resources.py:296
#: barbican/tests/tasks/test_resources.py:146
msgid "Process TypeOrder failure seen - please contact site administrator."
msgstr ""

#: barbican/tests/tasks/test_resources.py:223
msgid "Update Order failure seen - please contact site administrator."
msgstr ""

@@ -638,11 +638,13 @@ class ContainerConsumerMetadatum(BASE, ModelBase):
        sa.Index('values_index', 'container_id', 'name', 'URL')
    )

    def __init__(self, container_id, parsed_request=None):
    def __init__(self, container_id, parsed_request):
        """Registers a Consumer to a Container."""
        super(ContainerConsumerMetadatum, self).__init__()

        if parsed_request:
        # TODO(john-wood-w) This class should really be immutable due to the
        # data_hash attribute.
        if container_id and parsed_request:
            self.container_id = container_id
            self.name = parsed_request.get('name')
            self.URL = parsed_request.get('URL')

@@ -90,7 +90,7 @@ class CertificatePluginNotFound(exception.BarbicanException):
        if plugin_name:
            message = u._(
                'Certificate plugin "{name}"'
                ' not found or configured.').format(name=plugin_name)
                ' not found.').format(name=plugin_name)
        else:
            message = u._("Certificate plugin not found or configured.")
        super(CertificatePluginNotFound, self).__init__(message)
@@ -101,10 +101,10 @@ class CertificateEventPluginNotFound(exception.BarbicanException):
    def __init__(self, plugin_name=None):
        if plugin_name:
            message = u._(
                'Certificate event plugin "{name}" not found or '
                'configured.').format(name=plugin_name)
                'Certificate event plugin "{name}" '
                'not found.').format(name=plugin_name)
        else:
            message = u._("Certificate event plugin not found or configured.")
            message = u._("Certificate event plugin not found.")
        super(CertificateEventPluginNotFound, self).__init__(message)

@@ -64,7 +64,7 @@ class SecretContentTypeNotSupportedException(exception.BarbicanException):
    """Raised when support for payload content type is not available."""
    def __init__(self, content_type):
        super(SecretContentTypeNotSupportedException, self).__init__(
            u._("Secret Content Type of '{content_type}' "
            u._("A Content-Type of '{content_type}' for secrets is "
                "not supported").format(
                    content_type=content_type)
        )

@@ -38,31 +38,42 @@ class KeystoneEventConsumer(resources.BaseTask):
    def __init__(self, project_repo=None, order_repo=None,
                 secret_repo=None, project_secret_repo=None,
                 datum_repo=None, kek_repo=None, secret_meta_repo=None,
                 container_repo=None):
                 container_repo=None, repositories=None,
                 db_start=rep.start, db_commit=rep.commit,
                 db_rollback=rep.rollback, db_clear=rep.clear):
        LOG.debug('Creating KeystoneEventConsumer task processor')
        self.repos = rep.Repositories(project_repo=project_repo,
                                      order_repo=order_repo,
                                      secret_repo=secret_repo,
                                      project_secret_repo=project_secret_repo,
                                      datum_repo=datum_repo,
                                      kek_repo=kek_repo,
                                      secret_meta_repo=secret_meta_repo,
                                      container_repo=container_repo)

        self.db_start = db_start
        self.db_commit = db_commit
        self.db_rollback = db_rollback
        self.db_clear = db_clear

        self.repos = repositories
        if not repositories:
            self.repos = rep.Repositories(
                project_repo=project_repo,
                order_repo=order_repo,
                secret_repo=secret_repo,
                project_secret_repo=project_secret_repo,
                datum_repo=datum_repo,
                kek_repo=kek_repo,
                secret_meta_repo=secret_meta_repo,
                container_repo=container_repo)

    def process(self, *args, **kwargs):
        try:
            rep.start()
            self.db_start()
            super(KeystoneEventConsumer, self).process(*args, **kwargs)
            rep.commit()
            self.db_commit()
        except Exception as e:
            """Exceptions that reach here needs to revert the entire
            transaction.
            No need to log error message as its already done earlier.
            """
            rep.rollback()
            self.db_rollback()
            raise e
        finally:
            rep.clear()
            self.db_clear()

    def retrieve_entity(self, project_id, resource_type=None,
                        operation_type=None):

@@ -266,12 +266,12 @@ class UpdateOrder(BaseTask):
            secret_meta_repo=secret_meta_repo
        )

    def retrieve_entity(self, order_id, external_project_id):
    def retrieve_entity(self, order_id, external_project_id, updated_meta):
        return self.repos.order_repo.get(
            entity_id=order_id,
            external_project_id=external_project_id)

    def handle_processing(self, order, updated_meta):
    def handle_processing(self, order, order_id, keystone_id, updated_meta):
        self.handle_order(order, updated_meta)

    def handle_error(self, order, status, message, exception,

@@ -1,16 +0,0 @@
# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = 'john.wood'
0    barbican/tests/api/middleware/__init__.py    Normal file
54   barbican/tests/api/middleware/test_context.py    Normal file
@@ -0,0 +1,54 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import webob.exc

from barbican.api.middleware import context
from barbican.tests import utils


class WhenTestingBaseContextMiddleware(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingBaseContextMiddleware, self).setUp()

    def test_should_raise_attribute_error(self):

        base = context.BaseContextMiddleware(None)

        response = base.process_response(None)

        self.assertIsNone(response)


class WhenTestingContextMiddleware(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingContextMiddleware, self).setUp()

    def test_should_raise_attribute_error(self):

        middle = context.ContextMiddleware(None)
        request = mock.MagicMock()
        request.headers = {
            'X-Service-Catalog': 'force json error'
        }

        exception_result = self.assertRaises(
            webob.exc.HTTPInternalServerError,
            middle._get_authenticated_context,
            request)

        self.assertEqual(
            'Problem processing X-Service-Catalog', exception_result.message)
@@ -1,5 +1,3 @@
# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
98   barbican/tests/api/test_init.py    Normal file
@@ -0,0 +1,98 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
This test module tests the barbican.api.__init__.py module functionality.
"""
import mock

from barbican import api
from barbican.common import exception
from barbican.openstack.common import jsonutils as json
from barbican.plugin.interface import secret_store
from barbican.tests import utils


class WhenInvokingLoadBodyFunction(utils.BaseTestCase):
    """Tests the load_body function."""

    def setUp(self):
        super(WhenInvokingLoadBodyFunction, self).setUp()

    @mock.patch('pecan.abort')
    def test_should_abort_with_read_error(self, mock_pecan_abort):
        mock_pecan_abort.side_effect = ValueError('Abort!')

        req = mock.MagicMock()
        req.body_file = mock.MagicMock()
        req.body_file.read.side_effect = IOError('Dummy IOError')

        exception = self.assertRaises(
            ValueError, api.load_body, req)

        self.assertEqual('Abort!', exception.message)

    @mock.patch('pecan.abort')
    def test_should_abort_with_validation_unsupported_field(
            self, mock_pecan_abort):
        mock_pecan_abort.side_effect = ValueError('Abort!')

        body = json.dumps({'key1': 'value1'})

        req = mock.MagicMock()
        req.body_file = mock.MagicMock()
        req.body_file.read.return_value = body

        validator = mock.MagicMock()
        validator.validate.side_effect = exception.UnsupportedField('Field')

        exception_result = self.assertRaises(
            ValueError, api.load_body, req, validator=validator)

        self.assertEqual('Abort!', exception_result.message)
        validator.validate.assert_called_once_with(json.loads(body))


class WhenInvokingGenerateSafeExceptionMessageFunction(utils.BaseTestCase):
    """Tests the generate_safe_exception_message function."""

    def setUp(self):
        super(WhenInvokingGenerateSafeExceptionMessageFunction, self).setUp()

    def test_handle_secret_content_type_not_supported_exception(self):
        operation = 'operation'
        content_type = 'application/octet-stream'
        test_exception = secret_store.SecretContentTypeNotSupportedException(
            content_type)

        status, message = api.generate_safe_exception_message(
            operation, test_exception)

        self.assertEqual(400, status)
        self.assertEqual("operation issue seen - content-type of "
                         "'application/octet-stream' not "
                         "supported.", message)

    def test_handle_secret_content_encoding_not_supported_exception(self):
        operation = 'operation'
        content_encoding = 'application/octet-stream'
        test_excep = secret_store.SecretContentEncodingNotSupportedException(
            content_encoding)

        status, message = api.generate_safe_exception_message(
            operation, test_excep)

        self.assertEqual(400, status)
        self.assertEqual("operation issue seen - content-encoding of "
                         "'application/octet-stream' not "
                         "supported.", message)
@@ -112,10 +112,12 @@ def create_container(id_ref):

def create_consumer(container_id, id_ref):
    """Generate a ContainerConsumerMetadatum entity instance."""
    consumer = models.ContainerConsumerMetadatum(container_id)
    data = {
        'name': 'test name',
        'URL': 'http://test/url'
    }
    consumer = models.ContainerConsumerMetadatum(container_id, data)
    consumer.id = id_ref
    consumer.name = 'test name'
    consumer.URL = 'http://test/url'
    return consumer

@@ -28,6 +28,10 @@ class RepositoryTestCase(utils.BaseTestCase):
    Database/Repository oriented unit tests should *not* modify the global
    state in the barbican/model/repositories.py module, as this can lead to
    hard to debug errors. Instead only utilize methods in this fixture.

    Also, database-oriented unit tests extending this class MUST NOT INVOKE
    the repositories.start()/clear()/hard_reset() methods!*, otherwise *VERY*
    hard to debug 'Broken Pipe' errors could result!
    """
    def setUp(self):
        super(RepositoryTestCase, self).setUp()
@@ -35,7 +39,7 @@ class RepositoryTestCase(utils.BaseTestCase):
        # Ensure we are using in-memory SQLite database, and creating tables.
        repositories.CONF.set_override("sql_connection", "sqlite:///:memory:")
        repositories.CONF.set_override("db_auto_create", True)
        repositories.CONF.set_override("debug", True)
        repositories.CONF.set_override("debug", False)

        # Ensure the connection is completely closed, so any previous in-memory
        # database can be removed prior to starting the next test run.

127  barbican/tests/model/repositories/test_repositories_consumers.py    Normal file
@@ -0,0 +1,127 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import exception
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils as utils


class WhenTestingContainerConsumerRepository(utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingContainerConsumerRepository, self).setUp()
        self.repo = repositories.ContainerConsumerRepo()
        self.repo_container = repositories.ContainerRepo()

    def test_should_update_with_duplicate_consumer(self):
        session = self.repo.get_session()

        project = models.Project()
        project.external_id = "my keystone id"
        project.save(session=session)

        container = models.Container()
        container.project_id = project.id
        container.save(session=session)

        # Create a consumer.
        consumer = models.ContainerConsumerMetadatum(
            container.id, {'name': 'name', 'URL': 'www.foo.com'})
        consumer.save(session=session)

        # Commit things so far, because the 'create_or_update_from' call below
        # will handle consumer metadata with same composite key items already
        # existing, and then rollback this session's transaction, which would
        # remove the items added above and result in a not-found error below.
        session.commit()

        # Try to create a consumer on the container...should re-use the
        # one added above.
        consumer2 = models.ContainerConsumerMetadatum(
            container.id, {'name': 'name', 'URL': 'www.foo.com'})
        self.repo.create_or_update_from(consumer2, container, session=session)

        container2 = self.repo_container.get(
            container.id, project.external_id, session=session)
        self.assertEqual(1, len(container2.consumers))

    def test_should_raise_duplicate_create_same_composite_key_no_id(self):
        session = self.repo.get_session()

        project = models.Project()
        project.external_id = "my keystone id"
        project.save(session=session)

        container = models.Container()
        container.project_id = project.id
        container.save(session=session)

        # Create a consumer.
        consumer = models.ContainerConsumerMetadatum(
            container.id, {'name': 'name', 'URL': 'www.foo.com'})
        consumer.save(session=session)

        # Commit things so far, because the 'create_from' call below will
        # handle consumer metadata with same composite key items already
        # existing, and then rollback this session's transaction, which would
        # remove the items added above and result in a not-found error below.
        session.commit()

        # Create a new entity with the same composite key as the first one.
        consumer2 = models.ContainerConsumerMetadatum(
            container.id, {'name': 'name', 'URL': 'www.foo.com'})

        exception_result = self.assertRaises(
            exception.Duplicate,
            self.repo.create_from,
            consumer2,
            session=session)
        self.assertEqual(
            "Entity 'ContainerConsumer' already exists",
            exception_result.message)

    def test_should_raise_no_result_found_get_container_id(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_container_id,
            "my container id",
            session=session,
            suppress_exception=False)

    def test_should_raise_no_result_found_get_by_values_no_deleted(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_values,
            "my container id",
            "name",
            "url",
            session=session,
            suppress_exception=False,
            show_deleted=False)

    def test_should_raise_no_result_found_get_by_values_show_deleted(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_values,
            "my container id",
            "name",
            "url",
            session=session,
            suppress_exception=False,
            show_deleted=True)
@@ -0,0 +1,32 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import exception
from barbican.model import repositories
from barbican.tests import database_utils


class WhenTestingContainerRepository(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingContainerRepository, self).setUp()
        self.repo = repositories.ContainerRepo()

    def test_should_raise_no_result_found(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_create_date,
            "my keystone id",
            session=session,
            suppress_exception=False)
@@ -0,0 +1,69 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo.config import cfg

from barbican.common import exception
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils


class WhenTestingOrderRepository(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingOrderRepository, self).setUp()
        self.repo = repositories.OrderRepo()

    def test_should_raise_no_result_found_no_exception(self):
        session = self.repo.get_session()

        entities, offset, limit, total = self.repo.get_by_create_date(
            "my keystone id",
            session=session,
            suppress_exception=True)

        self.assertEqual([], entities)
        self.assertEqual(0, offset)
        self.assertEqual(cfg.CONF.default_limit_paging, limit)
        self.assertEqual(0, total)

    def test_should_raise_no_result_found_with_exceptions(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_create_date,
            "my keystone id",
            session=session,
            suppress_exception=False)

    def test_get_order(self):
        session = self.repo.get_session()

        project = models.Project()
        project.external_id = "my keystone id"
        project.save(session=session)

        order = models.Order()
        order.project_id = project.id
        self.repo.create_from(order, session=session)

        session.commit()

        order_from_get = self.repo.get(
            order.id,
            external_project_id="my keystone id",
            session=session,
        )

        self.assertEqual(order.id, order_from_get.id)
@@ -0,0 +1,49 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import exception
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils


class WhenTestingProjectRepository(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingProjectRepository, self).setUp()
        self.repo = repositories.ProjectRepo()

    def test_should_create_retrieve_deleted_project(self):
        session = self.repo.get_session()

        project = models.Project()
        project.keystone_id = 'my keystone id'
        project.status = models.States.ACTIVE
        self.repo.create_from(project, session=session)
        self.assertIsNotNone(project.id)
        self.assertFalse(project.deleted)

        project_get = self.repo.get(project.id)
        self.assertEqual(project.id, project_get.id)

        self.repo.delete_entity_by_id(project.id, 'my keystone id')
        self.assertTrue(project.deleted)

    def test_should_raise_no_result_found(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.find_by_external_project_id,
            "my keystone id",
            session=session,
            suppress_exception=False)
140  barbican/tests/model/repositories/test_repositories_secrets.py    Normal file
@@ -0,0 +1,140 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import exception
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils
from barbican.tests import utils


@utils.parameterized_test_case
class WhenTestingSecretRepository(database_utils.RepositoryTestCase):

    dataset_for_filter_tests = {
        'query_by_name': {
            'secret_1_dict': dict(name="name1"),
            'secret_2_dict': dict(name="name2"),
            'query_dict': dict(name="name1")
        },
        'query_by_algorithm': {
            'secret_1_dict': dict(algorithm="algorithm1"),
            'secret_2_dict': dict(algorithm="algorithm2"),
            'query_dict': dict(alg="algorithm1")
        },
        'query_by_mode': {
            'secret_1_dict': dict(mode="mode1"),
            'secret_2_dict': dict(mode="mode2"),
            'query_dict': dict(mode="mode1")
        },
        'query_by_bit_length': {
            'secret_1_dict': dict(bit_length=1024),
            'secret_2_dict': dict(bit_length=2048),
            'query_dict': dict(bits=1024)
        },
    }

    def setUp(self):
        super(WhenTestingSecretRepository, self).setUp()
        self.repo = repositories.SecretRepo()

    def test_get_by_create_date(self):
        session = self.repo.get_session()

        secret = self.repo.create_from(models.Secret(), session=session)
        project = models.Project()
        project.external_id = "my keystone id"
        project.save(session=session)

        project_secret = models.ProjectSecret()
        project_secret.secret_id = secret.id
        project_secret.project_id = project.id
        project_secret.save(session=session)

        session.commit()

        secrets, offset, limit, total = self.repo.get_by_create_date(
            "my keystone id",
            session=session,
        )

        self.assertEqual([s.id for s in secrets], [secret.id])
        self.assertEqual(offset, 0)
        self.assertEqual(limit, 10)
        self.assertEqual(total, 1)

    @utils.parameterized_dataset(dataset_for_filter_tests)
    def test_get_by_create_date_with_filter(
            self, secret_1_dict, secret_2_dict, query_dict):
        session = self.repo.get_session()

        secret1 = self.repo.create_from(
            models.Secret(secret_1_dict),
            session=session,
        )
        secret2 = self.repo.create_from(
            models.Secret(secret_2_dict),
            session=session,
        )
        project = models.Project()
        project.external_id = "my keystone id"
        project.save(session=session)

        project_secret1 = models.ProjectSecret()
        project_secret1.secret_id = secret1.id
        project_secret1.project_id = project.id
        project_secret1.save(session=session)

        project_secret2 = models.ProjectSecret()
        project_secret2.secret_id = secret2.id
        project_secret2.project_id = project.id
        project_secret2.save(session=session)

        session.commit()

        secrets, offset, limit, total = self.repo.get_by_create_date(
            "my keystone id",
            session=session,
            **query_dict
        )

        self.assertEqual([s.id for s in secrets], [secret1.id])
        self.assertEqual(offset, 0)
        self.assertEqual(limit, 10)
        self.assertEqual(total, 1)

    def test_get_by_create_date_nothing(self):
        session = self.repo.get_session()
        secrets, offset, limit, total = self.repo.get_by_create_date(
            "my keystone id",
            bits=1024,
            session=session,
            suppress_exception=True
        )

        self.assertEqual(secrets, [])
        self.assertEqual(offset, 0)
        self.assertEqual(limit, 10)
        self.assertEqual(total, 0)

    def test_do_entity_name(self):
        self.assertEqual(self.repo._do_entity_name(), "Secret")

    def test_should_raise_no_result_found(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_create_date,
            "my keystone id",
            session=session,
            suppress_exception=False)
@@ -0,0 +1,41 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import exception
from barbican.model import repositories
from barbican.tests import database_utils


class WhenTestingTransportKeyRepository(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingTransportKeyRepository, self).setUp()
        self.repo = repositories.TransportKeyRepo()

    def test_should_raise_no_result_found_with_plugin_name(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_create_date,
            plugin_name="plugin",
            session=session,
            suppress_exception=False)

    def test_should_raise_no_result_found_no_plugin_name(self):
        session = self.repo.get_session()

        self.assertRaises(
            exception.NotFound,
            self.repo.get_by_create_date,
            session=session,
            suppress_exception=False)
@@ -119,6 +119,25 @@ class WhenTestingSecretStorePluginManager(utils.BaseTestCase):
            keySpec,
        )

    def test_get_store_no_plugin_found_by_name(self):
        plugin = TestSecretStore([str.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]

        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
        plugin_name = 'plugin'

        exception_result = self.assertRaises(
            str.SecretStorePluginNotFound,
            self.manager.get_plugin_store,
            keySpec,
            plugin_name=plugin_name
        )

        self.assertEqual(
            'Secret store plugin "{name}" not found.'.format(name=plugin_name),
            exception_result.message)

    def test_get_generate_no_plugin_found(self):
        self.manager.extensions = []
        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)

@@ -15,50 +15,31 @@
import uuid

import mock
from oslo.config import cfg
import sqlalchemy

from barbican.common import exception
from barbican.common import resources as c_resources
from barbican.model import models
from barbican.model import repositories as rep
from barbican.plugin.crypto import manager
from barbican.plugin import resources as plugin
from barbican.tasks import keystone_consumer as consumer
from barbican.tests.queue import test_keystone_listener as listener_test
from barbican.tests import utils
from barbican.tests import database_utils


class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,
                                     utils.BaseTestCase):

    IN_MEM_DB_CONN_STRING = 'sqlite://'

    def setUp(self):
        super(WhenUsingKeystoneEventConsumer, self).setUp()

        self.conf = cfg.CONF
        self.engine = None
        self.addCleanup(self._cleanup)

    def _cleanup(self):
        if self.engine:
            self.engine.dispose()
class InitializeDatabaseMixin(object):

    def _init_memory_db_setup(self):
        # using in-memory sqlalchemy database, sqlite, instead of simulating
        # data via mocks to verify transaction behavior (like rollback when
        # error occurs in middle of delete project entities logic). This also
        # helps in verifying that project_entities related query is defined
        # correctly.
        self.opt_in_group(None, sql_connection=self.IN_MEM_DB_CONN_STRING)

        # Force a refresh of the singleton plugin manager for each test.
        manager._PLUGIN_MANAGER = None
        manager.CONF.set_override('enabled_crypto_plugins',
                                  'simple_crypto',
                                  group='crypto')

        self.project_id1 = uuid.uuid4().hex
        self.project_id2 = uuid.uuid4().hex

        rep._MAKER = None
        rep._ENGINE = None

        rep.configure_db()
        self.repos = rep.Repositories(
            project_repo=None, project_secret_repo=None, secret_repo=None,
            datum_repo=None, kek_repo=None, secret_meta_repo=None,
@@ -70,8 +51,6 @@ class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,
            self.project_id1, self.repos.project_repo)
        self.assertIsNotNone(self.project1_data)

        self.engine = rep.get_engine()

        self.project2_data = c_resources.get_or_create_project(
            self.project_id2, self.repos.project_repo)
        self.assertIsNotNone(self.project2_data)
@@ -87,11 +66,20 @@ class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,

        return new_secret

    def test_get_project_entities_lookup_call(self):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

class WhenUsingKeystoneEventConsumer(
        database_utils.RepositoryTestCase,
        InitializeDatabaseMixin):
    """Test all but the process() method on KeystoneEventConsumer class.

    For unit testing the process() method, use the
    WhenUsingKeystoneEventConsumerProcessMethod class.
    """

    def setUp(self):
        super(WhenUsingKeystoneEventConsumer, self).setUp()

    def test_get_project_entities_lookup_call(self):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project2_data)

@@ -122,13 +110,117 @@ class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,
                          self.repos.transport_key_repo.get_project_entities,
                          project2_id)

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_true(
            self, mock_entity_delete):
        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is suppressed here
        no_error = self.repos.project_repo.delete_project_entities(
            project1_id, suppress_exception=True)
        self.assertIsNone(no_error)

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_false(
            self, mock_entity_delete):
        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is not suppressed here
        self.assertRaises(exception.BarbicanException,
                          self.repos.project_repo.delete_project_entities,
                          project1_id, suppress_exception=False)

    def test_delete_project_entities_not_impl_error_suppress_exception_true(
            self):
        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # NotImplementedError is not suppressed regardless of related flag
        self.assertRaises(NotImplementedError,
                          self.repos.secret_meta_repo.delete_project_entities,
                          project1_id, suppress_exception=True)

    def test_delete_project_entities_not_impl_error_suppress_exception_false(
            self):
        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # NotImplementedError is not suppressed regardless of related flag
        self.assertRaises(NotImplementedError,
                          self.repos.secret_meta_repo.delete_project_entities,
                          project1_id, suppress_exception=False)

    def test_invoke_handle_error(self):
        task = consumer.KeystoneEventConsumer(repositories='Fake Repository')

        project = mock.MagicMock()
        project.project_id = 'project_id'
        status = 'status'
        message = 'message'
        exception_test = ValueError('Abort!')
        resource_type = 'type'
        operation_type = 'operation'

        task.handle_error(
            project, status, message, exception_test, project_id=None,
            resource_type=resource_type, operation_type=operation_type)


class WhenUsingKeystoneEventConsumerProcessMethod(
        database_utils.RepositoryTestCase,
        InitializeDatabaseMixin):
    """Test only the process() method on KeystoneEventConsumer class.

    For unit testing all but the process() method, use the
    WhenUsingKeystoneEventConsumer class.
    """

    def setUp(self):
        super(WhenUsingKeystoneEventConsumerProcessMethod, self).setUp()

        # Override the database start function as repositories.start() is
        # already invoked by the RepositoryTestCase base class setUp().
        # Similarly, override the clear function.
        self.task = consumer.KeystoneEventConsumer(
            db_start=mock.MagicMock(),
            db_clear=mock.MagicMock()
        )

    def test_project_entities_cleanup_for_no_matching_barbican_project(self):
        self._init_memory_db_setup()

        result = self.task.process(project_id=self.project_id1,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    def test_project_entities_cleanup_for_missing_barbican_project(self):
        self._init_memory_db_setup()

        result = self.task.process(project_id=None,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    @mock.patch.object(consumer.KeystoneEventConsumer, 'handle_success')
    def test_existing_project_entities_cleanup_for_plain_secret(
            self, mock_handle_success):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)
@@ -160,10 +252,10 @@ class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,
        db_kek = self.repos.kek_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_kek))

        task = consumer.KeystoneEventConsumer()
        result = task.process(project_id=self.project_id1,
                              resource_type='project',
                              operation_type='deleted')
        # task = consumer.KeystoneEventConsumer()
        result = self.task.process(project_id=self.project_id1,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

        mock_handle_success.assert_called()
@@ -191,120 +283,17 @@ class WhenUsingKeystoneEventConsumer(listener_test.UtilMixin,
        self.assertEqual(0, len(db_project))

        # Should have deleted SecretStoreMetadatum via children delete
        ex = self.assertRaises(exception.NotFound,
                               self.repos.secret_meta_repo.get,
                               entity_id=secret_metadata_id)

    def test_project_entities_cleanup_for_no_matching_barbican_project(self):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()

        task = consumer.KeystoneEventConsumer()
        result = task.process(project_id=self.project_id1,
                              resource_type='project',
                              operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    def test_project_entities_cleanup_for_missing_barbican_project(self):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()

        task = consumer.KeystoneEventConsumer()
        result = task.process(project_id=None,
                              resource_type='project',
                              operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_true(
            self, mock_entity_delete):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is suppressed here
        no_error = self.repos.project_repo.delete_project_entities(
            project1_id, suppress_exception=True)
        self.assertIsNone(no_error)

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_false(
            self, mock_entity_delete):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is not suppressed here
        self.assertRaises(exception.BarbicanException,
                          self.repos.project_repo.delete_project_entities,
                          project1_id, suppress_exception=False)

    def test_delete_project_entities_not_impl_error_suppress_exception_true(
            self):
        self.skipTest(
            "john-wood-w: Skipping database tests pending revised "
            "database unit testing.")

        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id