diff --git a/tempest/config.py b/tempest/config.py
index b787b19520..76d0fbad29 100644
--- a/tempest/config.py
+++ b/tempest/config.py
@@ -15,6 +15,7 @@
 
 from __future__ import print_function
 
+import functools
 import logging as std_logging
 import os
 import tempfile
@@ -22,6 +23,7 @@ import tempfile
 from oslo_concurrency import lockutils
 from oslo_config import cfg
 from oslo_log import log as logging
+import testtools
 
 from tempest.test_discover import plugins
 
@@ -1397,3 +1399,72 @@ class TempestConfigProxy(object):
 
 
 CONF = TempestConfigProxy()
+
+
+def skip_unless_config(*args):
+    """Decorator to raise a skip if a config opt doesn't exist or is False
+
+    :param str group: The first arg, the option group to check
+    :param str name: The second arg, the option name to check
+    :param str msg: Optional third arg, the skip msg to use if a skip is raised
+    :raises testtools.TestCase.skipException: If the specified config option
+        doesn't exist or it exists and evaluates to False
+    """
+    def decorator(f):
+        group = args[0]
+        name = args[1]
+
+        @functools.wraps(f)
+        def wrapper(self, *func_args, **func_kwargs):
+            if not hasattr(CONF, group):
+                msg = "Config group %s doesn't exist" % group
+                raise testtools.TestCase.skipException(msg)
+            else:
+                conf_group = getattr(CONF, group)
+                if not hasattr(conf_group, name):
+                    msg = "Config option %s.%s doesn't exist" % (group,
+                                                                 name)
+                    raise testtools.TestCase.skipException(msg)
+                else:
+                    value = getattr(conf_group, name)
+                    if not value:
+                        if len(args) == 3:
+                            msg = args[2]
+                        else:
+                            msg = "Config option %s.%s is false" % (group,
+                                                                    name)
+                        raise testtools.TestCase.skipException(msg)
+            return f(self, *func_args, **func_kwargs)
+        return wrapper
+    return decorator
+
+
+def skip_if_config(*args):
+    """Raise a skipException if a config option exists and is True
+
+    :param str group: The first arg, the option group to check
+    :param str name: The second arg, the option name to check
+    :param str msg: Optional third arg, the skip msg to use if a skip is raised
+    :raises testtools.TestCase.skipException: If the specified config option
+        exists and evaluates to True
+    """
+    def decorator(f):
+        group = args[0]
+        name = args[1]
+
+        @functools.wraps(f)
+        def wrapper(self, *func_args, **func_kwargs):
+            if hasattr(CONF, group):
+                conf_group = getattr(CONF, group)
+                if hasattr(conf_group, name):
+                    value = getattr(conf_group, name)
+                    if value:
+                        if len(args) == 3:
+                            msg = args[2]
+                        else:
+                            msg = "Config option %s.%s is true" % (group,
+                                                                   name)
+                        raise testtools.TestCase.skipException(msg)
+            return f(self, *func_args, **func_kwargs)
+        return wrapper
+    return decorator
diff --git a/tempest/tests/test_decorators.py b/tempest/tests/test_decorators.py
index 7c9579bb12..af5fc09ba9 100644
--- a/tempest/tests/test_decorators.py
+++ b/tempest/tests/test_decorators.py
@@ -246,3 +246,75 @@ class TestSimpleNegativeDecorator(BaseDecoratorsTest):
         self.assertIn("test_fake_negative", dir(obj))
         obj.test_fake_negative()
         mock.assert_called_once_with(self.FakeNegativeJSONTest._schema)
+
+
+class TestConfigDecorators(BaseDecoratorsTest):
+    def setUp(self):
+        super(TestConfigDecorators, self).setUp()
+        cfg.CONF.set_default('nova', True, 'service_available')
+        cfg.CONF.set_default('glance', False, 'service_available')
+
+    def _test_skip_unless_config(self, expected_to_skip=True, *decorator_args):
+
+        class TestFoo(test.BaseTestCase):
+            @config.skip_unless_config(*decorator_args)
+            def test_bar(self):
+                return 0
+
+        t = TestFoo('test_bar')
+        if expected_to_skip:
+            self.assertRaises(testtools.TestCase.skipException, t.test_bar)
+        else:
+            try:
+                self.assertEqual(t.test_bar(), 0)
+            except testtools.TestCase.skipException:
+                # We caught a skipException but we didn't expect to skip
+                # this test so raise a hard test failure instead.
+                raise testtools.TestCase.failureException(
+                    "Not supposed to skip")
+
+    def _test_skip_if_config(self, expected_to_skip=True,
+                             *decorator_args):
+
+        class TestFoo(test.BaseTestCase):
+            @config.skip_if_config(*decorator_args)
+            def test_bar(self):
+                return 0
+
+        t = TestFoo('test_bar')
+        if expected_to_skip:
+            self.assertRaises(testtools.TestCase.skipException, t.test_bar)
+        else:
+            try:
+                self.assertEqual(t.test_bar(), 0)
+            except testtools.TestCase.skipException:
+                # We caught a skipException but we didn't expect to skip
+                # this test so raise a hard test failure instead.
+                raise testtools.TestCase.failureException(
+                    "Not supposed to skip")
+
+    def test_skip_unless_no_group(self):
+        self._test_skip_unless_config(True, 'fake_group', 'an_option')
+
+    def test_skip_unless_no_option(self):
+        self._test_skip_unless_config(True, 'service_available',
+                                      'not_an_option')
+
+    def test_skip_unless_false_option(self):
+        self._test_skip_unless_config(True, 'service_available', 'glance')
+
+    def test_skip_unless_true_option(self):
+        self._test_skip_unless_config(False,
+                                      'service_available', 'nova')
+
+    def test_skip_if_no_group(self):
+        self._test_skip_if_config(False, 'fake_group', 'an_option')
+
+    def test_skip_if_no_option(self):
+        self._test_skip_if_config(False, 'service_available', 'not_an_option')
+
+    def test_skip_if_false_option(self):
+        self._test_skip_if_config(False, 'service_available', 'glance')
+
+    def test_skip_if_true_option(self):
+        self._test_skip_if_config(True, 'service_available', 'nova')
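For reference, a minimal usage sketch of the two new decorators (not part of the change above; the test class, test method names, and skip message are illustrative, and it assumes the standard service_available options are defined in the deployment's tempest.conf):

    # Illustrative only: how a Tempest test class could consume the
    # skip_unless_config/skip_if_config decorators added in this change.
    from tempest import config
    from tempest import test


    class ImageServiceExampleTest(test.BaseTestCase):

        # Skipped unless [service_available]/glance exists and evaluates to
        # True; the optional third argument overrides the default skip message.
        @config.skip_unless_config('service_available', 'glance',
                                   'glance is not available')
        def test_requires_glance(self):
            pass

        # Skipped whenever [service_available]/glance exists and evaluates to
        # True, i.e. this test only runs on deployments without glance.
        @config.skip_if_config('service_available', 'glance')
        def test_runs_without_glance(self):
            pass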