Add test cases

story: 2007441
Task: #39098

- Some initial test cases
- DB test cases and enhanced DB error handling
- Improved API error handling
- Fixed a Tools README.md bug
- New requirements changes for testing

Change-Id: I7f83c6472d1a86fa62e38cab2856be6c0d6bb259
Signed-off-by: Tomi Juvonen <tomi.juvonen@nokia.com>
Author: Tomi Juvonen
Date: 2020-03-18 18:34:53 +02:00
Parent: 4d1a100ba7
Commit: d06820b452
17 changed files with 952 additions and 131 deletions

View File

@@ -1,55 +1,49 @@
-# Copyright (c) 2018 OpenStack Foundation.
-# All Rights Reserved.
+# Copyright (c) 2013 Mirantis Inc.
 #
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
 #
 #    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

 import threading

+from oslo_context import context


-class BaseContext(object):
-
-    _elements = set()
+class FenixContext(context.RequestContext):
+
     _context_stack = threading.local()

-    def __init__(self, __mapping=None, **kwargs):
-        if __mapping is None:
-            self.__values = dict(**kwargs)
-        else:
-            if isinstance(__mapping, BaseContext):
-                __mapping = __mapping.__values
-            self.__values = dict(__mapping)
-            self.__values.update(**kwargs)
-        not_supported_keys = set(self.__values) - self._elements
-        for k in not_supported_keys:
-            del self.__values[k]
+    def __init__(self, user_id=None, project_id=None, project_name=None,
+                 service_catalog=None, user_name=None, **kwargs):
+        # NOTE(neha-alhat): During serializing/deserializing context object
+        # over the RPC layer, below extra parameters which are passed by
+        # `oslo.messaging` are popped as these parameters are not required.
+        kwargs.pop('client_timeout', None)
+        kwargs.pop('user_identity', None)
+        kwargs.pop('project', None)

-    def __getattr__(self, name):
-        try:
-            return self.__values[name]
-        except KeyError:
-            if name in self._elements:
-                return None
-            else:
-                raise AttributeError(name)
+        if user_id:
+            kwargs['user_id'] = user_id
+        if project_id:
+            kwargs['project_id'] = project_id

-    def __setattr__(self, name, value):
-        # NOTE(yorik-sar): only the very first assignment for __values is
-        # allowed. All context arguments should be set at the time the context
-        # object is being created.
-        if not self.__dict__:
-            super(BaseContext, self).__setattr__(name, value)
-        else:
-            raise Exception(self.__dict__, name, value)
+        super(FenixContext, self).__init__(**kwargs)
+
+        self.project_name = project_name
+        self.user_name = user_name
+        self.service_catalog = service_catalog or []
+
+        if self.is_admin and 'admin' not in self.roles:
+            self.roles.append('admin')

     def __enter__(self):
         try:
@@ -73,21 +67,13 @@ class BaseContext(object):
     # NOTE(yorik-sar): as long as oslo.rpc requires this
     def to_dict(self):
-        return self.__values
-
-
-class FenixContext(BaseContext):
-
-    _elements = set([
-        "user_id",
-        "project_id",
-        "auth_token",
-        "service_catalog",
-        "user_name",
-        "project_name",
-        "roles",
-        "is_admin",
-    ])
+        result = super(FenixContext, self).to_dict()
+        result['user_id'] = self.user_id
+        result['user_name'] = self.user_name
+        result['project_id'] = self.project_id
+        result['project_name'] = self.project_name
+        result['service_catalog'] = self.service_catalog
+        return result

     @classmethod
     def elevated(cls):

View File

@@ -71,9 +71,8 @@ def setup_db():
 def drop_db():
     try:
-        engine = db_session.EngineFacade(cfg.CONF.database.connection,
-                                         sqlite_fk=True).get_engine()
-        models.Lease.metavalues.drop_all(engine)
+        db_session.EngineFacade(cfg.CONF.database.connection,
+                                sqlite_fk=True).get_engine()
     except Exception as e:
         LOG.error("Database shutdown exception: %s", e)
         return False
@@ -95,6 +94,10 @@ def not_equal(*values):
     return InequalityCondition(values)


+def _selected_from_dict(selected, dict_source):
+    return {x: dict_source[x] for x in selected}
+
+
 class Constraint(object):
     def __init__(self, conditions):
         self.conditions = conditions
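The new `_selected_from_dict` helper simply projects the columns named in `selected` out of a row dict so they can be quoted in the `FenixDBDuplicateEntry` and `FenixDBNotFound` errors added below. A standalone illustration (not part of the commit):

```python
# Illustration only: what the helper returns when building error messages.
def _selected_from_dict(selected, dict_source):
    return {x: dict_source[x] for x in selected}

row = {'session_id': 'aaaaaaaa-1111', 'hostname': 'compute-1', 'type': 'compute'}
print(_selected_from_dict(['hostname'], row))   # {'hostname': 'compute-1'}
```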
@@ -122,7 +125,7 @@ class InequalityCondition(object):
         return [field != value for value in self.values]


-# Session
+# Maintenance session

 def _maintenance_session_get(session, session_id):
     query = model_query(models.MaintenanceSession, session)
     return query.filter_by(session_id=session_id).first()
@@ -187,8 +190,8 @@ def remove_session(session_id):
         msession = _maintenance_session_get(session, session_id)
         if not msession:
             # raise not found error
-            raise db_exc.FenixDBNotFound(session, session_id=session_id,
-                                         model='sessions')
+            raise db_exc.FenixDBNotFound(model="MaintenanceSession",
+                                         id=session_id)

         session.delete(msession)
@@ -216,6 +219,9 @@ def create_action_plugin(values):
     values = values.copy()
     ap = models.MaintenanceActionPlugin()
     ap.update(values)
+    if action_plugin_get(ap.session_id, ap.plugin):
+        raise db_exc.FenixDBDuplicateEntry(
+            model=ap.__class__.__name__, columns=ap.plugin)

     session = get_session()
     with session.begin():
@@ -236,6 +242,9 @@ def create_action_plugins(values_list):
         with session.begin():
             ap = models.MaintenanceActionPlugin()
             ap.update(vals)
+            if action_plugin_get(ap.session_id, ap.plugin):
+                raise db_exc.FenixDBDuplicateEntry(
+                    model=ap.__class__.__name__, columns=ap.plugin)
             try:
                 ap.save(session=session)
             except common_db_exc.DBDuplicateEntry as e:
@@ -273,6 +282,15 @@ def create_action_plugin_instance(values):
     ap_instance.update(values)

     session = get_session()
+    if _action_plugin_instance_get(session,
+                                   ap_instance.session_id,
+                                   ap_instance.plugin,
+                                   ap_instance.hostname):
+        selected = ['session_id', 'plugin', 'hostname']
+        raise db_exc.FenixDBDuplicateEntry(
+            model=ap_instance.__class__.__name__,
+            columns=str(_selected_from_dict(selected,
+                                            ap_instance.to_dict())))
     with session.begin():
         try:
             ap_instance.save(session=session)
@@ -288,6 +306,14 @@ def create_action_plugin_instance(values):

 def remove_action_plugin_instance(ap_instance):
     session = get_session()
+    if not _action_plugin_instance_get(session,
+                                       ap_instance.session_id,
+                                       ap_instance.plugin,
+                                       ap_instance.hostname):
+        selected = ['session_id', 'plugin', 'hostname']
+        raise db_exc.FenixDBNotFound(model=ap_instance.__class__.__name__,
+                                     id=str(_selected_from_dict(selected,
+                                            ap_instance.to_dict())))
     with session.begin():
         session.delete(ap_instance)
@@ -299,8 +325,8 @@ def _download_get(session, session_id, local_file):
                            local_file=local_file).first()


-def download_get(session_id, plugin, download):
-    return _download_get(get_session(), session_id, download)
+def download_get(session_id, local_file):
+    return _download_get(get_session(), session_id, local_file)


 def _download_get_all(session, session_id):
@@ -319,6 +345,11 @@ def create_downloads(values_list):
         with session.begin():
             d = models.MaintenanceDownload()
             d.update(vals)
+            if _download_get(session, d.session_id, d.local_file):
+                selected = ['local_file']
+                raise db_exc.FenixDBDuplicateEntry(
+                    model=d.__class__.__name__,
+                    columns=str(_selected_from_dict(selected, vals)))
             try:
                 d.save(session=session)
             except common_db_exc.DBDuplicateEntry as e:
@@ -354,6 +385,12 @@ def create_host(values):
     mhost.update(values)

     session = get_session()
+    if _host_get(session, mhost.session_id, mhost.hostname):
+        selected = ['hostname']
+        raise db_exc.FenixDBDuplicateEntry(
+            model=mhost.__class__.__name__,
+            columns=str(_selected_from_dict(selected,
+                                            mhost.to_dict())))
     with session.begin():
         try:
             mhost.save(session=session)
@@ -372,6 +409,12 @@ def create_hosts(values_list):
         with session.begin():
             mhost = models.MaintenanceHost()
             mhost.update(vals)
+            if _host_get(session, mhost.session_id, mhost.hostname):
+                selected = ['hostname']
+                raise db_exc.FenixDBDuplicateEntry(
+                    model=mhost.__class__.__name__,
+                    columns=str(_selected_from_dict(selected,
+                                                    mhost.to_dict())))
             try:
                 mhost.save(session=session)
             except common_db_exc.DBDuplicateEntry as e:
@@ -408,6 +451,12 @@ def create_project(values):
     mproject.update(values)

     session = get_session()
+    if _project_get(session, mproject.session_id, mproject.project_id):
+        selected = ['project_id']
+        raise db_exc.FenixDBDuplicateEntry(
+            model=mproject.__class__.__name__,
+            columns=str(_selected_from_dict(selected,
+                                            mproject.to_dict())))
     with session.begin():
         try:
             mproject.save(session=session)
@@ -426,6 +475,13 @@ def create_projects(values_list):
         with session.begin():
             mproject = models.MaintenanceProject()
             mproject.update(vals)
+            if _project_get(session, mproject.session_id,
+                            mproject.project_id):
+                selected = ['project_id']
+                raise db_exc.FenixDBDuplicateEntry(
+                    model=mproject.__class__.__name__,
+                    columns=str(_selected_from_dict(selected,
+                                                    mproject.to_dict())))
             try:
                 mproject.save(session=session)
             except common_db_exc.DBDuplicateEntry as e:
@@ -462,6 +518,13 @@ def create_instance(values):
     minstance.update(values)

     session = get_session()
+    if _instance_get(session, minstance.session_id,
+                     minstance.instance_id):
+        selected = ['instance_id']
+        raise db_exc.FenixDBDuplicateEntry(
+            model=minstance.__class__.__name__,
+            columns=str(_selected_from_dict(selected,
+                                            minstance.to_dict())))
     with session.begin():
         try:
             minstance.save(session=session)
@@ -480,6 +543,13 @@ def create_instances(values_list):
         with session.begin():
             minstance = models.MaintenanceInstance()
             minstance.update(vals)
+            if _instance_get(session, minstance.session_id,
+                             minstance.instance_id):
+                selected = ['instance_id']
+                raise db_exc.FenixDBDuplicateEntry(
+                    model=minstance.__class__.__name__,
+                    columns=str(_selected_from_dict(selected,
+                                                    minstance.to_dict())))
             try:
                 minstance.save(session=session)
             except common_db_exc.DBDuplicateEntry as e:
@@ -498,8 +568,8 @@ def remove_instance(session_id, instance_id):
         if not minstance:
             # raise not found error
-            raise db_exc.FenixDBNotFound(session, session_id=session_id,
-                                         model='instances')
+            raise db_exc.FenixDBNotFound(model='MaintenanceInstance',
+                                         id=instance_id)

         session.delete(minstance)
@@ -538,8 +608,8 @@ def remove_project_instance(instance_id):
         minstance = _project_instance_get(session, instance_id)
         if not minstance:
             # raise not found error
-            raise db_exc.FenixDBNotFound(session, instance_id=instance_id,
-                                         model='project_instances')
+            raise db_exc.FenixDBNotFound(model='ProjectInstance',
+                                         id=instance_id)

         session.delete(minstance)
@@ -596,7 +666,7 @@ def remove_instance_group(group_id):
         ig = _instance_group_get(session, group_id)
         if not ig:
             # raise not found error
-            raise db_exc.FenixDBNotFound(session, group_id=group_id,
-                                         model='instance_groups')
+            raise db_exc.FenixDBNotFound(model='InstanceGroup',
+                                         id=group_id)

         session.delete(ig)
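With the checks above in place, a second create call for the same key raises `FenixDBDuplicateEntry` up front instead of surfacing a low-level database error; the new unit tests below exercise exactly this. A hedged usage sketch (the host dict mirrors the fakes used in the tests and assumes a configured database):

```python
# Sketch only: duplicate creates now fail fast with FenixDBDuplicateEntry.
from fenix.db import exceptions as db_exceptions
from fenix.db.sqlalchemy import api as db_api

host = {'session_id': 'aaaaaaaa-1111-bbbb-2222-cccccccccccc',
        'hostname': 'compute-1',
        'type': 'compute',
        'maintained': False,
        'disabled': False}

db_api.create_host(host)
try:
    db_api.create_host(host)              # same session_id + hostname
except db_exceptions.FenixDBDuplicateEntry as e:
    print(e)                              # reports the offending 'hostname'
```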

View File

@@ -104,7 +104,12 @@ def authorize(extension, action=None, api='fenix', ctx=None,
             act = '%s:%s:%s' % (api, extension, action)
             LOG.debug("authorize policy: %s" % act)
             enforce(cur_ctx, act, tgt)
-            return func(self, *args, **kwargs)
+            try:
+                return func(self, *args, **kwargs)
+            except TypeError:
+                # TBD Invalid Method should always be caught before authorize
+                # This makes sure we get some feasible error
+                raise exceptions.NotFound(object=str(act))
         return wrapped
     return decorator
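The try/except added above maps a `TypeError` raised while dispatching to the wrapped API method onto `exceptions.NotFound`, so an invalid method or signature mismatch yields a sensible API error. A simplified, self-contained sketch of the pattern (not Fenix's actual policy module):

```python
# Simplified sketch of the decorator pattern; the real code also performs
# policy enforcement before dispatching to the wrapped method.
def authorize_sketch(act):
    def decorator(func):
        def wrapped(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except TypeError:
                # e.g. the request mapped to a handler with a mismatching
                # signature; report a "not found" style error instead.
                raise LookupError("not found: %s" % act)
        return wrapped
    return decorator


class API(object):
    @authorize_sketch("fenix:maintenance:session:get")
    def get(self, session_id):
        return {"session_id": session_id}


print(API().get("123"))   # normal dispatch
# API().get()             # TypeError -> LookupError("not found: ...")
```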

View File

@@ -0,0 +1,87 @@
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fixtures
import tempfile
import testscenarios
from oslo_config import cfg
from oslo_log import log as logging
from oslotest import base
from fenix import context
from fenix.db.sqlalchemy import api as db_api
from fenix.db.sqlalchemy import facade_wrapper
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
_DB_CACHE = None
class Database(fixtures.Fixture):
def setUp(self):
super(Database, self).setUp()
fd = tempfile.NamedTemporaryFile(delete=False)
self.db_path = fd.name
database_connection = 'sqlite:///' + self.db_path
cfg.CONF.set_override('connection', str(database_connection),
group='database')
facade_wrapper._clear_engine()
self.engine = facade_wrapper.get_engine()
db_api.setup_db()
self.addCleanup(db_api.drop_db)
class TestCase(testscenarios.WithScenarios, base.BaseTestCase):
"""Test case base class for all unit tests.
Due to the slowness of DB access, this class does not support DB tests.
If needed, please inherit from DBTestCase instead.
"""
def setUp(self):
"""Run before each test method to initialize test environment."""
super(TestCase, self).setUp()
self.context_mock = None
cfg.CONF(args=[], project='fenix')
def patch(self, obj, attr):
"""Returns a Mocked object on the patched attribute."""
mockfixture = self.useFixture(fixtures.MockPatchObject(obj, attr))
return mockfixture.mock
def set_context(self, ctx):
if self.context_mock is None:
self.context_mock = self.patch(context.FenixContext, 'current')
self.context_mock.return_value = ctx
class DBTestCase(TestCase):
"""Test case base class for all database unit tests.
`DBTestCase` differs from TestCase in that DB access is supported.
Only tests needing DB support should inherit from this class.
"""
def setUp(self):
super(DBTestCase, self).setUp()
global _DB_CACHE
if not _DB_CACHE:
_DB_CACHE = Database()
self.useFixture(_DB_CACHE)
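A hedged usage sketch of the two base classes defined above (the test class names here are only illustrative):

```python
# Sketch only: DB-backed tests inherit DBTestCase, plain unit tests TestCase.
from fenix import tests


class MyDbApiTest(tests.DBTestCase):
    # Gets the temporary sqlite Database fixture set up above.
    def test_needs_db(self):
        pass


class MyUnitTest(tests.TestCase):
    # No DB access; faster to run.
    def test_no_db(self):
        pass
```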

View File

@@ -1,23 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslotest import base
class TestCase(base.BaseTestCase):
"""Test case base class for all unit tests."""

View File

@@ -0,0 +1,441 @@
# Copyright (c) 2020 Nokia Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from oslo_utils import uuidutils
from fenix.db import exceptions as db_exceptions
from fenix.db.sqlalchemy import api as db_api
from fenix import tests
def _get_fake_random_uuid():
return uuidutils.generate_uuid()
def _get_fake_uuid():
"""Returns a fake uuid."""
return 'aaaaaaaa-1111-bbbb-2222-cccccccccccc'
def _get_fake_uuid1():
return 'dddddddd-3333-eeee-4444-ffffffffffff'
def _get_datetime(value='2020-03-19 00:00'):
return datetime.datetime.strptime(value, '%Y-%m-%d %H:%M')
def _get_fake_session_values():
session = {
'session_id': _get_fake_uuid(),
'prev_state': None,
'state': 'MAINTENANCE',
'maintenance_at': _get_datetime(),
'meta': "{'openstack_version': 'Train'}",
'workflow': "default"}
return session
def _get_fake_session_db_values(db_object_dict):
db_session = _get_fake_session_values()
db_session['created_at'] = db_object_dict['created_at']
db_session['updated_at'] = None
return db_session
def _get_fake_action_plugin_values(uuid=_get_fake_uuid(),
plugin='compute_upgrade'):
adict = {'session_id': uuid,
'plugin': plugin,
'type': 'compute',
'meta': "{'os': 'Linux123', 'esw': 'OS_Train'}"}
return adict
def _get_fake_action_plugin_db_values(db_object_dict,
uuid=_get_fake_uuid(),
plugin='compute_upgrade'):
db_adict = _get_fake_action_plugin_values(uuid, plugin)
db_adict['id'] = db_object_dict['id']
db_adict['created_at'] = db_object_dict['created_at']
db_adict['updated_at'] = None
return db_adict
def _get_fake_action_plugin_instance_values(uuid=_get_fake_uuid()):
aidict = {'session_id': uuid,
'plugin': 'compute_upgrade',
'hostname': 'compute-1',
'state': 'DONE'}
return aidict
def _get_fake_action_plugin_instance_db_values(db_object_dict):
db_aidict = _get_fake_action_plugin_instance_values()
db_aidict['id'] = db_object_dict['id']
db_aidict['created_at'] = db_object_dict['created_at']
db_aidict['updated_at'] = None
return db_aidict
def _get_fake_download_values(uuid=_get_fake_uuid(),
local_file="/tmp/compute.tar.gz"):
ddict = {'session_id': uuid,
'local_file': local_file}
return ddict
def _get_fake_download_db_values(db_object_dict,
uuid=_get_fake_uuid(),
local_file="/tmp/compute.tar.gz"):
db_ddict = _get_fake_download_values(uuid, local_file)
db_ddict['id'] = db_object_dict['id']
db_ddict['created_at'] = db_object_dict['created_at']
db_ddict['updated_at'] = None
return db_ddict
def _get_fake_host_values(uuid=_get_fake_uuid(),
hostname='compute-1'):
hdict = {'session_id': uuid,
'hostname': hostname,
'type': 'compute',
'maintained': False,
'disabled': False,
'details': None,
'plugin': None,
'plugin_state': None}
return hdict
def _get_fake_host_db_values(db_object_dict, hostname='compute-1'):
db_hdict = _get_fake_host_values(hostname=hostname)
db_hdict['id'] = db_object_dict['id']
db_hdict['created_at'] = db_object_dict['created_at']
db_hdict['updated_at'] = None
return db_hdict
def _get_fake_project_values(uuid=_get_fake_uuid(),
project_id=_get_fake_uuid1()):
pdict = {'session_id': uuid,
'project_id': project_id,
'state': None}
return pdict
def _get_fake_project_db_values(db_object_dict, project_id=_get_fake_uuid1()):
db_pdict = _get_fake_project_values(project_id=project_id)
db_pdict['id'] = db_object_dict['id']
db_pdict['created_at'] = db_object_dict['created_at']
db_pdict['updated_at'] = None
return db_pdict
def _get_fake_instance_values(uuid=_get_fake_uuid(),
instance_id=_get_fake_uuid1()):
idict = {'session_id': uuid,
'instance_id': instance_id,
'state': 'Running',
'action': None,
'project_id': _get_fake_uuid1(),
'project_state': None,
'instance_name': 'Instance1',
'action_done': False,
'host': 'compute-1',
'details': None}
return idict
def _get_fake_instance_db_values(db_object_dict,
instance_id=_get_fake_uuid1()):
db_idict = _get_fake_instance_values(instance_id=instance_id)
db_idict['id'] = db_object_dict['id']
db_idict['created_at'] = db_object_dict['created_at']
db_idict['updated_at'] = None
return db_idict
def _get_fake_project_instance_values(uuid=_get_fake_uuid()):
idict = {'instance_id': uuid,
'project_id': _get_fake_uuid1(),
'group_id': _get_fake_uuid1(),
'instance_name': 'Instance1',
'max_interruption_time': 10,
'migration_type': 'LIVE_MIGRATION',
'resource_mitigation': False,
'lead_time': 30}
return idict
def _get_fake_project_instance_db_values(db_object_dict):
db_idict = _get_fake_project_instance_values()
db_idict['created_at'] = db_object_dict['created_at']
db_idict['updated_at'] = None
return db_idict
def _get_fake_instance_group_values(uuid=_get_fake_uuid()):
idict = {'group_id': uuid,
'project_id': _get_fake_uuid1(),
'group_name': 'ha_group',
'anti_affinity_group': True,
'max_instances_per_host': 1,
'max_impacted_members': 1,
'recovery_time': 10,
'resource_mitigation': False}
return idict
def _get_fake_instance_group_db_values(db_object_dict):
db_idict = _get_fake_instance_group_values()
db_idict['created_at'] = db_object_dict['created_at']
db_idict['updated_at'] = None
return db_idict
class SQLAlchemyDBApiTestCase(tests.DBTestCase):
"""Test case for SQLAlchemy DB API."""
def setUp(self):
super(SQLAlchemyDBApiTestCase, self).setUp()
# Maintenance session
def test_create_session(self):
"""Test maintenance session create
Create a session and check the result equals the given values.
"""
result = db_api.create_session(_get_fake_session_values())
self.assertEqual(result.to_dict(),
_get_fake_session_db_values(result.to_dict()))
# We cannot create duplicate, so no need to test
def test_remove_session(self):
"""Test maintenance session removal
Check session does not exist after removal
"""
self.assertRaises(db_exceptions.FenixDBNotFound,
db_api.remove_session, _get_fake_uuid())
db_api.create_session(_get_fake_session_values())
db_api.remove_session(_get_fake_uuid())
self.assertIsNone(
db_api.maintenance_session_get(_get_fake_uuid()))
def test_remove_session_all(self):
"""Test maintenance session removal with all tables
Remove maintenance session that includes all other
maintenance session tables.
"""
# TBD: implement after the other DB test cases are in place
pass
# Action plug-in
def test_create_action_plugin(self):
result = db_api.create_action_plugin(_get_fake_action_plugin_values())
self.assertEqual(result.to_dict(),
_get_fake_action_plugin_db_values(result.to_dict()))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_action_plugin,
_get_fake_action_plugin_values())
def test_create_action_plugins(self):
ap_list = [_get_fake_action_plugin_values(plugin='compute_upgrade'),
_get_fake_action_plugin_values(plugin='compute_os')]
results = db_api.create_action_plugins(ap_list)
for result in results:
self.assertEqual(
result.to_dict(),
_get_fake_action_plugin_db_values(result.to_dict(),
plugin=result.plugin))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_action_plugins,
[_get_fake_action_plugin_values(),
_get_fake_action_plugin_values()])
# Action plug-in instance
def test_create_action_plugin_instance(self):
result = db_api.create_action_plugin_instance(
_get_fake_action_plugin_instance_values())
self.assertEqual(result.to_dict(),
_get_fake_action_plugin_instance_db_values(
result.to_dict()))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_action_plugin_instance,
_get_fake_action_plugin_instance_values())
def test_remove_action_plugin_instance(self):
result = db_api.create_action_plugin_instance(
_get_fake_action_plugin_instance_values())
api = _get_fake_action_plugin_instance_values()
db_api.remove_action_plugin_instance(result)
self.assertIsNone(
db_api.action_plugin_instance_get(api['session_id'],
api['plugin'],
api['hostname']))
self.assertRaises(db_exceptions.FenixDBNotFound,
db_api.remove_action_plugin_instance, result)
# Download
def test_create_downloads(self):
downloads = [_get_fake_download_values(),
_get_fake_download_values(local_file="foo")]
results = db_api.create_downloads(downloads)
for result in results:
self.assertEqual(
result.to_dict(),
_get_fake_download_db_values(result.to_dict(),
local_file=result.local_file))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_downloads,
[_get_fake_download_values(),
_get_fake_download_values()])
# Host
def test_create_host(self):
result = db_api.create_host(_get_fake_host_values())
self.assertEqual(result.to_dict(),
_get_fake_host_db_values(
result.to_dict()))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_host,
_get_fake_host_values())
def test_create_hosts(self):
hosts = [_get_fake_host_values(),
_get_fake_host_values(hostname='compute-2')]
results = db_api.create_hosts(hosts)
for result in results:
self.assertEqual(
result.to_dict(),
_get_fake_host_db_values(result.to_dict(),
hostname=result.hostname))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_hosts,
[_get_fake_host_values(),
_get_fake_host_values()])
# Project
def test_create_project(self):
result = db_api.create_project(_get_fake_project_values())
self.assertEqual(result.to_dict(),
_get_fake_project_db_values(
result.to_dict()))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_project,
_get_fake_project_values())
def test_create_projects(self):
projects = [_get_fake_project_values(),
_get_fake_project_values(project_id="1234567890")]
results = db_api.create_projects(projects)
for result in results:
self.assertEqual(
result.to_dict(),
_get_fake_project_db_values(result.to_dict(),
project_id=result.project_id))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_projects,
[_get_fake_project_values(),
_get_fake_project_values()])
# Instance
def test_create_instance(self):
result = db_api.create_instance(_get_fake_instance_values())
self.assertEqual(result.to_dict(),
_get_fake_instance_db_values(
result.to_dict()))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_instance,
_get_fake_instance_values())
def test_create_instances(self):
instances = [_get_fake_instance_values(),
_get_fake_instance_values(instance_id="123456789")]
results = db_api.create_instances(instances)
for result in results:
self.assertEqual(
result.to_dict(),
_get_fake_instance_db_values(result.to_dict(),
instance_id=result.instance_id))
self.assertRaises(db_exceptions.FenixDBDuplicateEntry,
db_api.create_instances,
[_get_fake_instance_values(),
_get_fake_instance_values()])
def test_remove_instance(self):
db_api.create_instance(_get_fake_instance_values())
iv = _get_fake_instance_values()
db_api.remove_instance(iv['session_id'], iv['instance_id'])
self.assertIsNone(
db_api.instance_get(iv['session_id'], iv['instance_id']))
self.assertRaises(db_exceptions.FenixDBNotFound,
db_api.remove_instance,
iv['session_id'],
iv['instance_id'])
# Project instances
def test_update_project_instance(self):
result = db_api.update_project_instance(
_get_fake_project_instance_values())
self.assertEqual(result.to_dict(),
_get_fake_project_instance_db_values(
result.to_dict()))
# No need to test duplicate as always overwritten
def test_remove_project_instance(self):
db_api.update_project_instance(_get_fake_project_instance_values())
pi = _get_fake_project_instance_values()
db_api.remove_project_instance(pi['instance_id'])
self.assertIsNone(
db_api.project_instance_get(pi['instance_id']))
self.assertRaises(db_exceptions.FenixDBNotFound,
db_api.remove_project_instance,
pi['instance_id'])
# Instances groups
def test_update_instance_group(self):
result = db_api.update_instance_group(
_get_fake_instance_group_values())
self.assertEqual(result.to_dict(),
_get_fake_instance_group_db_values(
result.to_dict()))
# No need to test duplicate as always overwritten
def test_remove_instance_group(self):
db_api.update_instance_group(_get_fake_instance_group_values())
gi = _get_fake_instance_group_values()
db_api.remove_instance_group(gi['group_id'])
self.assertIsNone(
db_api.instance_group_get(gi['group_id']))
self.assertRaises(db_exceptions.FenixDBNotFound,
db_api.remove_instance_group,
gi['group_id'])

View File

@@ -0,0 +1,34 @@
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from fenix.db import api as db_api
from fenix import tests
class DBApiTestCase(tests.TestCase):
"""Test case for DB API."""
def setUp(self):
super(DBApiTestCase, self).setUp()
self.db_api = db_api
self.patch(self.db_api.IMPL, "setup_db").return_value = True
self.patch(self.db_api.IMPL, "drop_db").return_value = True
def test_setup_db(self):
self.assertTrue(self.db_api.setup_db())
def test_drop_db(self):
self.assertTrue(self.db_api.drop_db())

View File

@@ -0,0 +1,82 @@
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils.fixture import uuidsentinel
from fenix import context
from fenix import tests
class TestFenixContext(tests.TestCase):
def test_to_dict(self):
ctx = context.FenixContext(
user_id=111, project_id=222,
request_id='req-679033b7-1755-4929-bf85-eb3bfaef7e0b')
expected = {
'auth_token': None,
'domain': None,
'global_request_id': None,
'is_admin': False,
'is_admin_project': True,
'project': 222,
'project_domain': None,
'project_id': 222,
'project_name': None,
'read_only': False,
'request_id': 'req-679033b7-1755-4929-bf85-eb3bfaef7e0b',
'resource_uuid': None,
'roles': [],
'service_catalog': [],
'show_deleted': False,
'system_scope': None,
'tenant': 222,
'user': 111,
'user_domain': None,
'user_id': 111,
'user_identity': u'111 222 - - -',
'user_name': None}
self.assertEqual(expected, ctx.to_dict())
def test_elevated_empty(self):
ctx = context.FenixContext.elevated()
self.assertTrue(ctx.is_admin)
def test_service_catalog_default(self):
ctxt = context.FenixContext(user_id=uuidsentinel.user_id,
project_id=uuidsentinel.project_id)
self.assertEqual([], ctxt.service_catalog)
ctxt = context.FenixContext(user_id=uuidsentinel.user_id,
project_id=uuidsentinel.project_id,
service_catalog=[])
self.assertEqual([], ctxt.service_catalog)
ctxt = context.FenixContext(user_id=uuidsentinel.user_id,
project_id=uuidsentinel.project_id,
service_catalog=None)
self.assertEqual([], ctxt.service_catalog)
def test_fenix_context_elevated(self):
user_context = context.FenixContext(
user_id=uuidsentinel.user_id,
project_id=uuidsentinel.project_id, is_admin=False)
self.assertFalse(user_context.is_admin)
admin_context = user_context.elevated()
self.assertFalse(user_context.is_admin)
self.assertTrue(admin_context.is_admin)
self.assertNotIn('admin', user_context.roles)
self.assertIn('admin', admin_context.roles)

View File

@@ -0,0 +1,68 @@
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from fenix import exceptions
from fenix import tests
class FenixExceptionTestCase(tests.TestCase):
def test_default_error_msg(self):
class FakeFenixException(exceptions.FenixException):
msg_fmt = "default message"
exc = FakeFenixException()
self.assertEqual('default message', six.text_type(exc))
def test_error_msg(self):
self.assertEqual('test',
six.text_type(exceptions.FenixException('test')))
def test_default_error_msg_with_kwargs(self):
class FakeFenixException(exceptions.FenixException):
msg_fmt = "default message: %(code)s"
exc = FakeFenixException(code=500)
self.assertEqual('default message: 500', six.text_type(exc))
self.assertEqual('default message: 500', str(exc))
def test_error_msg_exception_with_kwargs(self):
class FakeFenixException(exceptions.FenixException):
msg_fmt = "default message: %(mispelled_code)s"
exc = FakeFenixException(code=500, mispelled_code='blah')
self.assertEqual('default message: blah', six.text_type(exc))
self.assertEqual('default message: blah', str(exc))
def test_default_error_code(self):
class FakeFenixException(exceptions.FenixException):
code = 404
exc = FakeFenixException()
self.assertEqual(404, exc.kwargs['code'])
def test_error_code_from_kwarg(self):
class FakeFenixException(exceptions.FenixException):
code = 500
exc = FakeFenixException(code=404)
self.assertEqual(404, exc.kwargs['code'])
def test_policynotauthorized_exception(self):
exc = exceptions.PolicyNotAuthorized(action='foo')
self.assertEqual("Policy doesn't allow foo to be performed",
six.text_type(exc))
self.assertEqual(403, exc.kwargs['code'])

View File

@@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_fenix
----------------------------------
Tests for `fenix` module.
"""
from fenix.tests import base
class TestFenix(base.TestCase):
def test_something(self):
pass

View File

@@ -0,0 +1,77 @@
# Copyright (c) 2013 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of Policy Engine For Fenix."""
from oslo_config import cfg
from fenix import context
from fenix import exceptions
from fenix import policy
from fenix import tests
CONF = cfg.CONF
class FenixPolicyTestCase(tests.TestCase):
def setUp(self):
super(FenixPolicyTestCase, self).setUp()
self.context = context.FenixContext(user_id='fake',
project_id='fake',
roles=['member'])
def test_standardpolicy(self):
target_good = {'user_id': self.context.user_id,
'project_id': self.context.project_id}
target_wrong = {'user_id': self.context.user_id,
'project_id': 'bad_project'}
action = "fenix:maintenance:session:project:get"
self.assertTrue(policy.enforce(self.context, action,
target_good))
self.assertFalse(policy.enforce(self.context, action,
target_wrong, False))
def test_adminpolicy(self):
target = {'user_id': self.context.user_id,
'project_id': self.context.project_id}
action = ""
self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
self.context, action, target)
def test_elevatedpolicy(self):
target = {'user_id': self.context.user_id,
'project_id': self.context.project_id}
action = "fenix:maintenance:get"
self.assertRaises(exceptions.PolicyNotAuthorized, policy.enforce,
self.context, action, target)
elevated_context = self.context.elevated()
self.assertTrue(policy.enforce(elevated_context, action, target))
def test_authorize(self):
@policy.authorize('maintenance:session:project', 'get',
ctx=self.context)
def user_method_with_action(self):
return True
@policy.authorize('maintenance', 'get', ctx=self.context)
def adminonly_method_with_action(self):
return True
self.assertTrue(user_method_with_action(self))
self.assertRaises(exceptions.PolicyNotAuthorized,
adminonly_method_with_action, self)

View File

@@ -118,7 +118,7 @@ Use DevStack admin as user. Set your variables needed accordingly
 ```sh
 . ~/devstack/operc admin admin
-USER_ID=`openstack user list | grep admin | awk '{print $2}'
+USER_ID=`openstack user list | grep admin | awk '{print $2}'`
 HOST=192.0.2.4
 PORT=12347
 ```
@@ -150,6 +150,12 @@ recover system before trying to test again. This is covered in Term3 below.

 #### Term3: VNFM (fenix/tools/vnfm.py)

+Use DevStack admin as user.
+
+```sh
+. ~/devstack/operc admin admin
+```
+
 Go to Fenix Kubernetes tool directory for testing

 ```sh

View File

@@ -4,13 +4,20 @@ openstackdocstheme==1.31.2 # Apache-2.0
 oslotest==3.8.0 # Apache-2.0
 pbr==2.0 # Apache-2.0
 python-subunit==1.3.0 # Apache-2.0/BSD
-reno==2.11.3;python_version=='2.7'
-reno==3.0.0;python_version=='3.5'
 reno==3.0.0;python_version=='3.6'
 reno==3.0.0;python_version=='3.7'
-sphinx==1.8.5;python_version=='2.7'
-sphinx==2.3.1;python_version=='3.5'
 sphinx==2.3.1;python_version=='3.6'
 sphinx==2.3.1;python_version=='3.7'
 stestr==1.0.0 # Apache-2.0
 testtools==2.2.0 # MIT
+ddt==1.0.1 # MIT
+mock==2.0.0 # BSD
+fixtures==3.0.0 # Apache-2.0/BSD
+testrepository==0.0.18 # Apache-2.0/BSD
+testscenarios==0.4 # Apache-2.0/BSD
+oslo.context==2.23 # Apache-2.0
+oslo.config==4.46 # Apache-2.0
+oslo.log==3.43 # Apache-2.0
+oslo.db==4.46 # Apache-2.0
+oslo.policy==2.2.0 # Apache-2.0
+oslo.messaging==9.6.0 # Apache-2.0

View File

@@ -8,3 +8,14 @@ python-subunit>=1.3.0 # Apache-2.0/BSD
 oslotest>=3.8.0 # Apache-2.0
 stestr>=1.0.0 # Apache-2.0
 testtools>=2.2.0 # MIT
+ddt>=1.0.1 # MIT
+mock>=2.0.0 # BSD
+fixtures>=3.0.0 # Apache-2.0/BSD
+testrepository>=0.0.18 # Apache-2.0/BSD
+testscenarios>=0.4 # Apache-2.0/BSD
+oslo.context>=2.23 # Apache-2.0
+oslo.config>=4.46 # Apache-2.0
+oslo.log>=3.43 # Apache-2.0
+oslo.db>=4.46 # Apache-2.0
+oslo.policy!=3.0.0,>=2.2.0 # Apache-2.0
+oslo.messaging>=9.6.0 # Apache-2.0

View File

@@ -5,17 +5,15 @@ ignore_basepython_conflict = True

 [testenv]
 usedevelop = True
-install_command = pip install -U {opts} {packages}
+install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} {opts} {packages}
 setenv =
     VIRTUAL_ENV={envdir}
     PYTHONWARNINGS=default::DeprecationWarning
     OS_STDOUT_CAPTURE=1
     OS_STDERR_CAPTURE=1
     OS_TEST_TIMEOUT=60
-deps =
-    -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/test-requirements.txt
+deps = -r{toxinidir}/requirements.txt
+       -r{toxinidir}/test-requirements.txt
 commands = stestr run {posargs}

 [testenv:pep8]