Rally logging: test engine
This patch provides the Rally test engine component with logging facilities.

Blueprint logging

Change-Id: I278990a831d17d5ae2f1b103ef3c17480a5fc80b
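The logging added below follows the oslo-incubator pattern Rally already uses: a module-level logger from rally.openstack.common.log, messages wrapped in the gettextutils marker `_`, and every message prefixed with the UUID of the task being processed, so lines from concurrent tasks can be told apart. A minimal sketch of that pattern (illustrative only; validate_something and its task argument are hypothetical, not part of the patch):

    from rally.openstack.common.gettextutils import _  # noqa
    from rally.openstack.common import log as logging

    LOG = logging.getLogger(__name__)


    def validate_something(task):
        # Prefix each message with the task UUID, as the patch does.
        task_uuid = task['uuid']
        LOG.info(_('Task %s: Validating the passed test config...') % task_uuid)
        LOG.debug(_('Task %s: Validation succeeded.') % task_uuid)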
@@ -26,6 +26,10 @@ from rally.benchmark import tests
 from rally.benchmark import utils
 from rally import consts
 from rally import exceptions
+from rally.openstack.common.gettextutils import _  # noqa
+from rally.openstack.common import log as logging
+
+LOG = logging.getLogger(__name__)
 
 
 class TestEngine(object):
@@ -63,6 +67,7 @@ class TestEngine(object):
                 }
             }
         }
+        :param task: The current task which is being performed
         """
         self.task = task
         self._validate_test_config(test_config)
@@ -78,10 +83,14 @@ class TestEngine(object):
 
         :raises: Exception if the test config is not valid
         """
-        # Perform schema verification
+        task_uuid = self.task['uuid']
+        LOG.info(_('Task %s: Validating the passed test config...') %
+                 task_uuid)
+        # Perform schema validation
         try:
             jsonschema.validate(test_config, config.test_config_schema)
         except jsonschema.ValidationError as e:
+            LOG.exception(_('Task %s: Error: %s') % (task_uuid, e.message))
             raise exceptions.InvalidConfigException(message=e.message)
 
         # Check for test names
@@ -91,7 +100,11 @@ class TestEngine(object):
                 continue
             for test in test_config[test_type]['tests_to_run']:
                 if test not in tests.tests[test_type]:
+                    LOG.exception(_('Task %s: Error: specified '
+                                    'test does not exist: %s') %
+                                  (task_uuid, test))
                     raise exceptions.NoSuchTestException(test_name=test)
+        LOG.info(_('Task %s: Test config validation succeeded.') % task_uuid)
 
     def _format_test_config(self, test_config):
         """Returns a formatted copy of the given valid test config so that
@@ -102,6 +115,9 @@ class TestEngine(object):
 
         :returns: Dictionary
         """
+        task_uuid = self.task['uuid']
+        LOG.debug(_('Task %s: Formatting the given test config...') %
+                  task_uuid)
         formatted_test_config = copy.deepcopy(test_config)
         # NOTE(msdubov): if 'verify' or 'benchmark' tests are not specified,
         # run them all by default.
@@ -117,17 +133,27 @@ class TestEngine(object):
             formatted_test_config['benchmark'] = {
                 'tests_to_run': tests_to_run
             }
+        LOG.debug(_('Task %s: Test config formatting succeeded.') % task_uuid)
         return formatted_test_config
 
     def __enter__(self):
+        task_uuid = self.task['uuid']
+        LOG.debug(_('Task %s: Writing cloud & test configs into '
+                    'temporary files...') % task_uuid)
         with os.fdopen(self.cloud_config_fd, 'w') as f:
             self.cloud_config.write(f)
         with os.fdopen(self.test_config_fd, 'w') as f:
             self.test_config.write(f)
+        LOG.debug(_('Task %s: Completed writing temporary '
+                    'config files.') % task_uuid)
 
     def __exit__(self, type, value, traceback):
+        task_uuid = self.task['uuid']
+        LOG.debug(_('Task %s: Deleting temporary config files...') % task_uuid)
         os.remove(self.cloud_config_path)
         os.remove(self.test_config_path)
+        LOG.debug(_('Task %s: Completed deleting temporary '
+                    'config files.') % task_uuid)
 
     def bind(self, cloud_config):
         """Binds an existing deployment configuration to the test engine.
@@ -141,8 +167,11 @@ class TestEngine(object):
 
         :returns: self (the method should be called in a 'with' statement)
         """
+        task_uuid = self.task['uuid']
+        LOG.info(_('Task %s: Binding the cloud config...') % task_uuid)
         self.cloud_config = config.CloudConfigManager()
         self.cloud_config.read_from_dict(cloud_config)
+        LOG.info(_('Task %s: Successfuly bound the cloud config.') % task_uuid)
 
         self.cloud_config_fd, self.cloud_config_path = tempfile.mkstemp(
             suffix='rallycfg', text=True)
@@ -156,8 +185,10 @@ class TestEngine(object):
 
         :raises: VerificationException if some of the verification tests failed
         """
+        task_uuid = self.task['uuid']
         self.task.update_status(consts.TaskStatus.TEST_TOOL_VERIFY_OPENSTACK)
-        tester = utils.Tester(self.cloud_config_path)
+        LOG.info(_('Task %s: Verifying the cloud deployment...') % task_uuid)
+        tester = utils.Tester(self.task, self.cloud_config_path)
         tests_to_run = self.test_config.to_dict()['verify']['tests_to_run']
         verification_tests = dict((test, tests.verification_tests[test])
                                   for test in tests_to_run)
@@ -166,8 +197,13 @@ class TestEngine(object):
         for test_results in test_run_results:
             for result in test_results.itervalues():
                 if result['status'] != 0:
+                    error_msg = '\n'.join(result['msg'])
+                    LOG.exception(_('Task %s: One of verification '
+                                    'tests failed: %s') %
+                                  (task_uuid, error_msg))
                     raise exceptions.DeploymentVerificationException(
-                        test_message=result['msg'])
+                        test_message=error_msg)
+        LOG.info(_('Task %s: Verification succeeded.') % task_uuid)
 
     def benchmark(self):
         """Runs the benchmarks according to the test configuration
@@ -176,9 +212,14 @@ class TestEngine(object):
         :returns: List of dicts, each dict containing the results of all the
                   corresponding benchmark test launches
         """
+        task_uuid = self.task['uuid']
         self.task.update_status(consts.TaskStatus.TEST_TOOL_BENCHMARKING)
-        tester = utils.Tester(self.cloud_config_path, self.test_config_path)
+        LOG.info(_('Task %s: Launching benchmark scenarios...') % task_uuid)
+        tester = utils.Tester(self.task, self.cloud_config_path,
+                              self.test_config_path)
         tests_to_run = self.test_config.to_dict()['benchmark']['tests_to_run']
         benchmark_tests = dict((test, tests.benchmark_tests[test])
                                for test in tests_to_run)
-        return tester.run_all(benchmark_tests)
+        result = tester.run_all(benchmark_tests)
+        LOG.info(_('Task %s: Completed benchmarking.') % task_uuid)
+        return result
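Taken together, the engine changes leave its public flow intact: bind() is still used as a context manager around verify() and benchmark(), with each step now emitting per-task log messages. A rough usage sketch assembled from the tests further below (the test_config, cloud_config and task objects are assumed to be set up as in those tests; task must support dict-style access to 'uuid'):

    # Rough usage sketch, not part of the diff.
    test_engine = engine.TestEngine(test_config, task)
    with test_engine.bind(cloud_config):
        test_engine.verify()               # 'Task <uuid>: Verifying the cloud deployment...'
        results = test_engine.benchmark()  # 'Task <uuid>: Launching benchmark scenarios...'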
@@ -23,8 +23,12 @@ import pytest
 import fuel_health.cleanup as fuel_cleanup
 
 from rally.benchmark import config
+from rally.openstack.common.gettextutils import _  # noqa
+from rally.openstack.common import log as logging
 from rally import utils
 
+LOG = logging.getLogger(__name__)
+
 
 def parameterize_from_test_config(benchmark_name):
     """Decorator that configures the test function parameters through the
@@ -69,7 +73,7 @@ def _run_cleanup(config):
 
 class Tester(object):
 
-    def __init__(self, cloud_config_path, test_config_path=None):
+    def __init__(self, task, cloud_config_path, test_config_path=None):
         self._cloud_config_path = os.path.abspath(cloud_config_path)
         if test_config_path:
             self._test_config_manager = config.TestConfigManager(
@@ -77,6 +81,7 @@ class Tester(object):
             os.environ['PYTEST_CONFIG'] = os.path.abspath(test_config_path)
         else:
             self._test_config_manager = None
+        self.task = task
         self._q = multiprocessing.Queue()
 
     def run_all(self, tests):
@@ -96,8 +101,11 @@ class Tester(object):
         else:
             tests_to_run = {}
 
+        task_uuid = self.task['uuid']
         res = []
         for test_name in tests:
+            LOG.debug(_('Task %s: Launching benchmark `%s`...') %
+                      (task_uuid, test_name))
             test_runs = tests_to_run.get(test_name, [{}])
             for i, test_run in enumerate(test_runs):
                 times = test_run.get('times', 1)
@@ -105,6 +113,8 @@ class Tester(object):
                 os.environ['PYTEST_RUN_INDEX'] = str(i)
                 res.append(self.run(tests[test_name],
                                     times=times, concurrent=concurrent))
+            LOG.debug(_('Task %s: Completed benchmark `%s`.') %
+                      (task_uuid, test_name))
         return res
 
     def run(self, test_args, times=1, concurrent=1):
@@ -126,17 +136,27 @@ class Tester(object):
             timeout = str(60 * 60 * 60 / times)
             test_args.extend(['--timeout', timeout])
 
+        task_uuid = self.task['uuid']
+        LOG.debug(_('Task %s: Running test: creating multiprocessing pool') %
+                  task_uuid)
+        LOG.debug(_('Times = %i, Concurrent = %i') % (times, concurrent))
         iterable_test_args = ((test_args, self._cloud_config_path, n)
                               for n in xrange(times))
         pool = multiprocessing.Pool(concurrent)
         result_generator = pool.imap(_run_test, iterable_test_args)
         results = {}
         for result in result_generator:
+            LOG.debug(_('Task %s: Process %s returned.') %
+                      (task_uuid, result['proc_name']))
             results[result['proc_name']] = result
             if result['status'] and 'Timeout' in result['msg']:
                 # cancel remaining tests if one test was timed out
+                LOG.debug(_('Task %s: One of the tests timed out, '
+                            'cancelling remaining tests...') % task_uuid)
                 break
+        LOG.debug(_('Task %s: Cleaning up...') % task_uuid)
         self._cleanup(self._cloud_config_path)
+        LOG.debug(_('Task %s: Cleanup completed.') % task_uuid)
         return results
 
     def _cleanup(self, cloud_config_path):
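Tester.run keeps its existing fan-out: the test arguments are expanded into one tuple per launch, handed to a multiprocessing.Pool of `concurrent` workers, and consumed through imap as each worker returns, which is where the new per-process debug message fires. A stripped-down sketch of that pattern (Python 2, matching the xrange/itervalues style of the code; _worker and fan_out are stand-ins, not Rally's _run_test and Tester.run):

    import multiprocessing


    def _worker(args):
        # Stand-in for the real worker: run one launch and report a result
        # dict keyed by process name.
        test_args, cloud_config_path, n = args
        return {'proc_name': 'run_%d' % n, 'status': 0, 'msg': []}


    def fan_out(test_args, cloud_config_path, times=1, concurrent=1):
        iterable = ((test_args, cloud_config_path, n) for n in xrange(times))
        pool = multiprocessing.Pool(concurrent)
        results = {}
        for result in pool.imap(_worker, iterable):
            # The patch logs 'Task %s: Process %s returned.' at this point.
            results[result['proc_name']] = result
        return results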
@@ -74,21 +74,22 @@ class TestEngineTestCase(test.NoDBTestCase):
 
     def test_verify_test_config(self):
         try:
-            engine.TestEngine(self.valid_test_config, mock.Mock())
+            engine.TestEngine(self.valid_test_config, mock.MagicMock())
         except Exception as e:
             self.fail("Unexpected exception in test config" +
                       "verification: %s" % str(e))
         self.assertRaises(exceptions.NoSuchTestException,
                           engine.TestEngine,
                           self.invalid_test_config_bad_test_name,
-                          mock.Mock())
+                          mock.MagicMock())
         self.assertRaises(exceptions.InvalidConfigException,
                           engine.TestEngine,
                           self.invalid_test_config_bad_key,
-                          mock.Mock())
+                          mock.MagicMock())
 
     def test_bind(self):
-        test_engine = engine.TestEngine(self.valid_test_config, mock.Mock())
+        test_engine = engine.TestEngine(self.valid_test_config,
+                                        mock.MagicMock())
         with test_engine.bind(self.valid_cloud_config):
             self.assertTrue(os.path.exists(test_engine.cloud_config_path))
             self.assertTrue(os.path.exists(test_engine.test_config_path))
@@ -96,7 +97,8 @@ class TestEngineTestCase(test.NoDBTestCase):
         self.assertFalse(os.path.exists(test_engine.test_config_path))
 
     def test_verify(self):
-        test_engine = engine.TestEngine(self.valid_test_config, mock.Mock())
+        test_engine = engine.TestEngine(self.valid_test_config,
+                                        mock.MagicMock())
         with test_engine.bind(self.valid_cloud_config):
             try:
                 test_engine.verify()
@@ -104,7 +106,8 @@ class TestEngineTestCase(test.NoDBTestCase):
                 self.fail("Exception in TestEngine.verify: %s" % str(e))
 
     def test_benchmark(self):
-        test_engine = engine.TestEngine(self.valid_test_config, mock.Mock())
+        test_engine = engine.TestEngine(self.valid_test_config,
+                                        mock.MagicMock())
         with test_engine.bind(self.valid_cloud_config):
             test_engine.benchmark()
 
@@ -120,9 +123,12 @@ class TestEngineTestCase(test.NoDBTestCase):
             mock.call.update_status(s.TEST_TOOL_VERIFY_OPENSTACK),
             mock.call.update_verification_log(json.dumps(
                 [self.run_success, self.run_success])),
-            mock.call.update_status(s.TEST_TOOL_BENCHMARKING),
+            mock.call.update_status(s.TEST_TOOL_BENCHMARKING)
         ]
-        self.assertEqual(fake_task.mock_calls, expected)
+        # NOTE(msdubov): Ignore task['uuid'] calls which are used for logging
+        mock_calls = filter(lambda call: '__getitem__' not in call[0],
+                            fake_task.mock_calls)
+        self.assertEqual(mock_calls, expected)
 
     def test_task_status_invalid_config(self):
         fake_task = mock.MagicMock()
@@ -131,4 +137,7 @@ class TestEngineTestCase(test.NoDBTestCase):
         except exceptions.InvalidConfigException:
             pass
         expected = []
-        self.assertEqual(fake_task.mock_calls, expected)
+        # NOTE(msdubov): Ignore task['uuid'] calls which are used for logging
+        mock_calls = filter(lambda call: '__getitem__' not in call[0],
+                            fake_task.mock_calls)
+        self.assertEqual(mock_calls, expected)
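The switch from mock.Mock() to mock.MagicMock() in these tests is needed because the engine and tester now read task['uuid'] for logging, and only MagicMock implements __getitem__; those item accesses are then recorded in mock_calls, which is why the assertions filter them out before comparing against the expected status updates. A small self-contained illustration of that filtering (the status value is just an example):

    import mock

    fake_task = mock.MagicMock()
    fake_task['uuid']                     # recorded as a __getitem__ call
    fake_task.update_status('verifying')  # the call the test actually checks

    # Drop the dict-style accesses that exist only because of logging.
    mock_calls = filter(lambda call: '__getitem__' not in call[0],
                        fake_task.mock_calls)
    assert mock_calls == [mock.call.update_status('verifying')]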
@@ -56,7 +56,7 @@ class UtilsTestCase(test.NoDBTestCase):
         super(UtilsTestCase, self).tearDown()
 
     def test_running_test(self):
-        tester = utils.Tester(self.cloud_config_path)
+        tester = utils.Tester(mock.MagicMock(), self.cloud_config_path)
         test = ['./tests/benchmark/test_utils.py', '-k', 'test_dummy_1']
         for (times, concurrent) in [(1, 1), (3, 2), (2, 3)]:
             results = tester.run(test, times=times, concurrent=concurrent)
@@ -65,7 +65,7 @@ class UtilsTestCase(test.NoDBTestCase):
                 self.assertEqual(result['status'], 0)
 
     def test_running_multiple_tests(self):
-        tester = utils.Tester(self.cloud_config_path)
+        tester = utils.Tester(mock.MagicMock(), self.cloud_config_path)
         tests_dict = {
             'test1': ['./tests/benchmark/test_utils.py', '-k', 'test_dummy_1'],
             'test2': ['./tests/benchmark/test_utils.py', '-k', 'test_dummy_2']
@@ -89,14 +89,14 @@ class UtilsTestCase(test.NoDBTestCase):
                 }
             }
         }
-        test_engine = engine.TestEngine(test_config, mock.Mock())
+        test_engine = engine.TestEngine(test_config, mock.MagicMock())
         with test_engine.bind(cloud_config):
             res = test_engine.benchmark()
             self.assertEqual(res[0].values()[0]['status'], 0)
         tests.benchmark_tests = old_benchmark_tests
 
     def test_tester_timeout(self):
-        tester = utils.Tester(self.cloud_config_path)
+        tester = utils.Tester(mock.MagicMock(), self.cloud_config_path)
         test = ['./tests/benchmark/test_utils.py', '-k',
                 'test_dummy_timeout', '--timeout', '1']
         results = tester.run(test, times=2, concurrent=2)
@@ -105,7 +105,7 @@ class UtilsTestCase(test.NoDBTestCase):
                 self.assertTrue(result['status'] != 0)
 
     def test_tester_no_timeout(self):
-        tester = utils.Tester(self.cloud_config_path)
+        tester = utils.Tester(mock.MagicMock(), self.cloud_config_path)
         test = ['./tests/benchmark/test_utils.py', '-k',
                 'test_dummy_timeout', '--timeout', '2']
         results = tester.run(test, times=2, concurrent=2)