Improved log path handling

Validations logging now has a documented fallback,
improving readability and providing a reference for the user.

Tests are included.

A complementary patch for the validations_commons callbacks
is optional, as the behavior should stay largely the same.

Depends-On: https://review.opendev.org/c/openstack/validations-libs/+/794638

Signed-off-by: Jiri Podivin <jpodivin@redhat.com>
Change-Id: I02df49532974cd0bcb2df8176b19ffc1cd5a65d1
Jiri Podivin 2021-05-03 11:36:29 +02:00
parent 6a0293b11e
commit b0a4bd7140
5 changed files with 248 additions and 74 deletions
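
The fallback described in the commit message can be illustrated with a minimal, self-contained sketch. This is not the patched implementation; the helper name resolve_dirs and the example path are illustrative only, and the default base directory is simplified to a single value.

```python
# Minimal sketch of the documented fallback (illustrative, not the patched code).
# VALIDATIONS_LOG_BASEDIR stands in for the default from constants.py.
import os

VALIDATIONS_LOG_BASEDIR = '/var/log/validations'


def resolve_dirs(log_path):
    """Return (log_path, artifacts_path), falling back to the default base dir."""
    artifacts = os.path.join(log_path, 'artifacts')
    try:
        os.makedirs(artifacts, exist_ok=True)
    except (OSError, PermissionError):
        # The requested location is unusable: fall back to the default base dir.
        log_path = VALIDATIONS_LOG_BASEDIR
        artifacts = os.path.join(log_path, 'artifacts')
        os.makedirs(artifacts, exist_ok=True)
    return log_path, artifacts


print(resolve_dirs('/tmp/validations-example'))
```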

View File

@@ -20,9 +20,7 @@ ANSIBLE_VALIDATION_DIR = '/usr/share/ansible/validation-playbooks'
VALIDATION_GROUPS_INFO = '%s/groups.yaml' % DEFAULT_VALIDATIONS_BASEDIR
VALIDATIONS_LOG_BASEDIR = ('/var/log/validations'
if os.path.exists('/var/log/validations') else
os.getcwd())
VALIDATIONS_LOG_BASEDIR = os.path.join(os.environ.get('HOME'), 'validations')
VALIDATION_ANSIBLE_ARTIFACT_PATH = '{}/artifacts/'.format(
VALIDATIONS_LOG_BASEDIR)

View File

@@ -47,17 +47,64 @@ class TestUtils(TestCase):
utils.get_validations_data,
validation)
@mock.patch('validations_libs.utils.LOG.error')
@mock.patch('validations_libs.utils.LOG.warning')
@mock.patch('validations_libs.utils.os.path.exists',
return_value=True)
@mock.patch('validations_libs.utils.current_time',
return_value='2020-04-02T06:58:20.352272Z')
@mock.patch('os.makedirs')
@mock.patch('uuid.uuid4', return_value='1234')
def test_create_artifacts_dir(self, mock_uuid, mock_makedirs,
mock_datetime):
uuid, dir_path = utils.create_artifacts_dir(dir_path='/tmp/foo',
prefix='ntp')
def test_create_artifacts_subdir(self, mock_uuid, mock_makedirs,
mock_datetime, mock_path_exists,
mock_warning, mock_error):
uuid, artifacts_dir = utils.get_artifacts_subdir(
log_dir='/tmp/foo/artifacts',
prefix='ntp')
self.assertEqual(uuid, '1234')
self.assertEqual(dir_path,
'/tmp/foo/1234_ntp_2020-04-02T06:58:20.352272Z')
self.assertEqual(artifacts_dir,
'/tmp/foo/artifacts/1234_ntp_2020-04-02T06:58:20.352272Z')
mock_error.assert_not_called()
mock_warning.assert_not_called()
@mock.patch('validations_libs.utils.LOG.exception')
@mock.patch('validations_libs.utils.current_time',
return_value='2020-04-02T06:58:20.352272Z')
@mock.patch('validations_libs.utils.os.path.exists', return_value=False)
@mock.patch('os.makedirs', side_effect=[OSError, None])
@mock.patch('uuid.uuid4', return_value='1234')
def test_create_artifacts_subdir_OSError(self, mock_uuid, mock_makedirs,
mock_exists, mock_datetime,
mock_exception):
self.assertRaises(
RuntimeError,
utils.get_artifacts_subdir,
log_dir='/tmp/foo/artifacts',
prefix='ntp')
mock_exception.assert_called()
@mock.patch('validations_libs.utils.LOG.exception')
@mock.patch('validations_libs.utils.current_time',
return_value='2020-04-02T06:58:20.352272Z')
@mock.patch('validations_libs.utils.os.path.exists', return_value=False)
@mock.patch('os.makedirs', side_effect=[PermissionError, None])
@mock.patch('uuid.uuid4', return_value='1234')
def test_create_artifacts_subdir_PermissionError(self, mock_uuid,
mock_makedirs,
mock_exists, mock_datetime,
mock_exception):
self.assertRaises(
RuntimeError,
utils.get_artifacts_subdir,
log_dir='/tmp/foo/artifacts',
prefix='ntp')
mock_exception.assert_called()
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('six.moves.builtins.open')

View File

@@ -45,11 +45,12 @@ class TestValidationActions(TestCase):
'My Validation Two Name',
['prep', 'pre-introspection'])]))
@mock.patch('validations_libs.utils.create_artifacts_dir',
return_value=('1234', '/tmp/'))
@mock.patch('validations_libs.utils.get_artifacts_subdir',
return_value=('123', '/var/log/validations'))
@mock.patch('validations_libs.utils.get_validations_playbook',
return_value=['/tmp/foo/fake.yaml'])
def test_validation_skip_validation(self, mock_validation_play, mock_tmp):
def test_validation_skip_validation(self, mock_validation_play,
mock_artsubdir):
playbook = ['fake.yaml']
inventory = 'tmp/inventory.yaml'
@@ -66,16 +67,23 @@ class TestValidationActions(TestCase):
limit_hosts=None)
self.assertEqual(run_return, [])
@mock.patch('validations_libs.utils.os.path.exists',
return_value=True)
@mock.patch('validations_libs.utils.get_artifacts_subdir',
return_value=(
'123',
'/var/log/validations/artifacts/123_foo_time'))
@mock.patch('validations_libs.utils.get_validations_playbook',
return_value=['/tmp/foo/fake.yaml'])
@mock.patch('validations_libs.ansible.Ansible.run')
@mock.patch('validations_libs.utils.create_artifacts_dir',
return_value=('1234', '/tmp/'))
def test_validation_skip_on_specific_host(self, mock_tmp, mock_ansible_run,
mock_validation_play):
def test_validation_skip_on_specific_host(self, mock_ansible_run,
mock_validation_play,
mock_artsubdir,
mock_exists):
mock_ansible_run.return_value = ('fake.yaml', 0, 'successful')
run_called_args = {
'workdir': '/tmp/',
'workdir': '/var/log/validations/artifacts/123_foo_time',
'playbook': '/tmp/foo/fake.yaml',
'base_dir': '/usr/share/ansible/',
'playbook_dir': '/tmp/foo',
@@ -86,11 +94,11 @@ class TestValidationActions(TestCase):
'quiet': True,
'extra_vars': None,
'limit_hosts': '!cloud1',
'ansible_artifact_path': '/tmp/',
'extra_env_variables': None,
'ansible_cfg': None,
'gathering_policy': 'explicit',
'log_path': None,
'ansible_artifact_path': '/var/log/validations/artifacts/123_foo_time',
'log_path': '/var/log/validations',
'run_async': False,
'python_interpreter': None,
'ssh_user': None
@@ -106,22 +114,30 @@ class TestValidationActions(TestCase):
run = ValidationActions()
run_return = run.run_validations(playbook, inventory,
log_path='/var/log/validations',
validations_dir='/tmp/foo',
skip_list=skip_list,
limit_hosts='!cloud1',
)
mock_ansible_run.assert_called_with(**run_called_args)
@mock.patch('validations_libs.utils.os.path.exists',
return_value=True)
@mock.patch('validations_libs.utils.get_artifacts_subdir',
return_value=(
'123',
'/var/log/validations/artifacts/123_foo_time'))
@mock.patch('validations_libs.utils.get_validations_playbook',
return_value=['/tmp/foo/fake.yaml'])
@mock.patch('validations_libs.ansible.Ansible.run')
@mock.patch('validations_libs.utils.create_artifacts_dir',
return_value=('1234', '/tmp/'))
def test_validation_skip_with_limit_host(self, mock_tmp, mock_ansible_run,
mock_validation_play):
def test_validation_skip_with_limit_host(self, mock_ansible_run,
mock_validation_play,
mock_artsubdir,
mock_exists):
mock_ansible_run.return_value = ('fake.yaml', 0, 'successful')
run_called_args = {
'workdir': '/tmp/',
'workdir': '/var/log/validations/artifacts/123_foo_time',
'playbook': '/tmp/foo/fake.yaml',
'base_dir': '/usr/share/ansible/',
'playbook_dir': '/tmp/foo',
@@ -135,8 +151,8 @@ class TestValidationActions(TestCase):
'extra_env_variables': None,
'ansible_cfg': None,
'gathering_policy': 'explicit',
'ansible_artifact_path': '/tmp/',
'log_path': None,
'ansible_artifact_path': '/var/log/validations/artifacts/123_foo_time',
'log_path': '/var/log/validations',
'run_async': False,
'python_interpreter': None,
'ssh_user': None
@@ -152,40 +168,36 @@ class TestValidationActions(TestCase):
run = ValidationActions()
run_return = run.run_validations(playbook, inventory,
log_path='/var/log/validations',
validations_dir='/tmp/foo',
skip_list=skip_list,
limit_hosts='cloud,cloud1,!cloud2')
mock_ansible_run.assert_called_with(**run_called_args)
@mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
@mock.patch('validations_libs.utils.get_artifacts_subdir',
return_value=(
'123',
'/var/log/validations/artifacts/123_foo_time'))
@mock.patch('validations_libs.validation_actions.ValidationLogs.get_results',
side_effect=fakes.FAKE_SUCCESS_RUN)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.ansible.Ansible.run')
@mock.patch('validations_libs.utils.create_artifacts_dir',
return_value=('1234', '/tmp/'))
def test_validation_run_success(self, mock_tmp, mock_ansible_run,
mock_validation_dir, mock_results):
def test_validation_run_success(self, mock_ansible_run,
mock_validation_dir,
mock_results,
mock_artsubdir):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {}}]
mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')
mock_results.return_value = [{'Duration': '0:00:01.761',
'Host_Group': 'overcloud',
'Status': 'PASSED',
'Status_by_Host': 'subnode-1,PASSED',
'UUID': 'foo',
'Unreachable_Hosts': '',
'Validations': 'ntp'}]
expected_run_return = [{'Duration': '0:00:01.761',
'Host_Group': 'overcloud',
'Status': 'PASSED',
'Status_by_Host': 'subnode-1,PASSED',
'UUID': 'foo',
'Unreachable_Hosts': '',
'Validations': 'ntp'}]
expected_run_return = fakes.FAKE_SUCCESS_RUN[0]
playbook = ['fake.yaml']
inventory = 'tmp/inventory.yaml'
@@ -206,20 +218,26 @@ class TestValidationActions(TestCase):
validations_dir='/tmp/foo'
)
@mock.patch('validations_libs.utils.get_artifacts_subdir',
return_value=(
'123',
'/var/log/validations/artifacts/123_foo_time'))
@mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.ansible.Ansible.run')
@mock.patch('validations_libs.utils.create_artifacts_dir',
return_value=('1234', '/tmp/'))
def test_validation_run_failed(self, mock_tmp, mock_ansible_run,
mock_validation_dir, mock_results):
def test_validation_run_failed(self, mock_ansible_run,
mock_validation_dir, mock_results,
mock_artsubdir):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {}}]
mock_ansible_run.return_value = ('foo.yaml', 0, 'failed')
mock_results.return_value = [{'Duration': '0:00:01.761',
'Host_Group': 'overcloud',
'Status': 'PASSED',
@@ -227,6 +245,7 @@ class TestValidationActions(TestCase):
'UUID': 'foo',
'Unreachable_Hosts': '',
'Validations': 'ntp'}]
expected_run_return = [{'Duration': '0:00:01.761',
'Host_Group': 'overcloud',
'Status': 'PASSED',

View File

@@ -32,32 +32,137 @@ def current_time():
return '%sZ' % datetime.datetime.utcnow().isoformat()
def create_artifacts_dir(dir_path=None, prefix=None):
"""Create Ansible artifacts directory
def _create_directory(path):
"""Create directory and all intemediate directories in the path.
:param dir_path: Directory asbolute path
:type dir_path: `string`
:param prefix: Playbook name
:type prefix: `string`
:return: The UUID of the validation and the absolute Path of the log file
:param path: selected dir absolute path
:type path: `string`
:return: verified path
:rtype: `string`
"""
if not os.path.exists(path):
os.makedirs(path)
return path
def _create_default_log_dir():
"""Create default validations log dir.
Log the failure if encountering OSError or PermissionError.
:raises: RuntimeError
"""
try:
log_path = "{}".format(constants.VALIDATIONS_LOG_BASEDIR)
os.makedirs(log_path)
except (OSError, PermissionError):
LOG.error(
(
"Error while creating the log directory. "
"Please check the access rights for: '{}'"
).format(log_path)
)
raise RuntimeError()
def get_results_dirs(log_path):
"""Retrieve log directory and the Ansible artifacts directory.
This function performs additional checks on log directory
access and provides a possible fallback.
In order, the following locations are considered for the artifacts directory:
`log_path` parameter
`validations_libs.constants.VALIDATIONS_LOG_BASEDIR`
Errors and warnings are logged to notify the user about the remediation
and the possible complexities it may lead to.
An attempt is made to create the default log directory if it is missing.
.. note::
The reason we are not using the os.access function
to greatly simplify this process is that it would break with
the recommended practices described in the Python docs.
:param log_path: Absolute log directory path
:type log_path: `string`
:return: log path and artifacts path
:rtype: `string`, `string`
"""
dir_path = (dir_path if dir_path else
constants.VALIDATION_ANSIBLE_ARTIFACT_PATH)
validation_uuid = str(uuid.uuid4())
log_dir = "{}/{}_{}_{}".format(dir_path, validation_uuid,
(prefix if prefix else ''), current_time())
try:
os.makedirs(log_dir)
return validation_uuid, log_dir
except OSError:
artifact_dir = os.path.join(log_path, 'artifacts')
artifacts_dir = _create_directory(artifact_dir)
except (OSError, PermissionError):
LOG.error(
(
"Error while creating Ansible artifacts directory. "
"Please check the access rights for: '{}'"
).format(log_path)
)
log_path = constants.VALIDATIONS_LOG_BASEDIR
LOG.warning(
(
"Requested log folder unavailable, defaulting to: '{}'"
).format(log_path)
)
if not os.path.exists(log_path):
LOG.warning(
(
"Default log folder unavailable, creating: '{}'"
).format(log_path)
)
_create_default_log_dir()
artifact_dir = os.path.join(log_path, 'artifacts')
artifacts_dir = _create_directory(artifact_dir)
LOG.warning(
(
"Selected artifact folder unavailable, defaulting to: '{}'"
).format(artifacts_dir)
)
return log_path, artifacts_dir
def get_artifacts_subdir(log_dir, prefix=''):
"""Retrieve Ansible artifacts directory
:param log_dir: Directory absolute path
:type log_dir: `string`
:param prefix: Playbook name
:type prefix: `string`
:return: The UUID of the validation and the absolute path of the artifacts sub-directory
:rtype: `string`, `string`
"""
try:
validation_uuid = str(uuid.uuid4())
artifact_sub_dir = "{}_{}_{}".format(
validation_uuid,
prefix,
current_time())
artifact_sub_dir = os.path.join(log_dir, artifact_sub_dir)
os.makedirs(artifact_sub_dir)
except (OSError, PermissionError):
LOG.exception(
(
"Error while creating Ansible artifacts log file."
"Please check the access rights for {}"
"Error while creating Ansible artifacts directory. "
"Please check the access rights for: '{}'"
).format(log_dir)
)
raise RuntimeError()
return validation_uuid, artifact_sub_dir
def parse_all_validations_on_disk(path, groups=None):
"""Return a list of validations metadata which can be sorted by Groups

View File

@@ -228,7 +228,8 @@ class ValidationActions(object):
extra_env_vars=None, ansible_cfg=None, quiet=True,
workdir=None, limit_hosts=None, run_async=False,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
log_path=None, python_interpreter=None,
log_path=constants.VALIDATIONS_LOG_BASEDIR,
python_interpreter=None,
skip_list=None,
callback_whitelist=None,
output_callback='validation_stdout',
@@ -268,6 +269,8 @@ class ValidationActions(object):
``constants.DEFAULT_VALIDATIONS_BASEDIR``)
:type base_dir: ``string``
:param log_path: The absolute path of the validations logs directory
(Defaults to
``constants.VALIDATIONS_LOG_BASEDIR``)
:type log_path: ``string``
:param python_interpreter: Path to the Python interpreter to be
used for module execution on remote targets,
@@ -353,6 +356,7 @@ class ValidationActions(object):
else:
raise RuntimeError("No validations found")
log_path, artifacts_path = v_utils.get_results_dirs(log_path)
self.log.debug('Running the validations with Ansible')
results = []
for playbook in playbooks:
@@ -362,11 +366,12 @@ class ValidationActions(object):
play_name,
limit_hosts)
if _play:
validation_uuid, artifacts_dir = v_utils.create_artifacts_dir(
dir_path=log_path, prefix=os.path.basename(playbook))
run_ansible = v_ansible(validation_uuid)
val_uuid, art_dir = v_utils.get_artifacts_subdir(
log_dir=artifacts_path,
prefix=os.path.basename(playbook))
run_ansible = v_ansible(val_uuid)
_playbook, _rc, _status = run_ansible.run(
workdir=artifacts_dir,
workdir=art_dir,
playbook=playbook,
base_dir=base_dir,
playbook_dir=validations_dir,
@@ -380,7 +385,7 @@ class ValidationActions(object):
extra_env_variables=extra_env_vars,
ansible_cfg=ansible_cfg,
gathering_policy='explicit',
ansible_artifact_path=artifacts_dir,
ansible_artifact_path=art_dir,
log_path=log_path,
run_async=run_async,
python_interpreter=python_interpreter,
@@ -389,7 +394,7 @@ class ValidationActions(object):
'rc_code': _rc,
'status': _status,
'validations': _playbook.split('.')[0],
'UUID': validation_uuid,
'UUID': val_uuid,
})
else:
self.log.debug('Skipping Validations: {}'.format(playbook))
@@ -398,7 +403,7 @@ class ValidationActions(object):
return results
# Return log results
uuid = [id['UUID'] for id in results]
vlog = ValidationLogs()
vlog = ValidationLogs(log_path)
return vlog.get_results(uuid)
def group_information(self, groups):
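
Since log_path now defaults to constants.VALIDATIONS_LOG_BASEDIR, callers no longer have to pass it explicitly. A caller-side sketch follows, with the playbook list and inventory path taken from the tests as placeholders.

```python
# Caller-side sketch: log_path defaults to constants.VALIDATIONS_LOG_BASEDIR
# with this patch, so it is omitted here. Values below are placeholders.
from validations_libs.validation_actions import ValidationActions

actions = ValidationActions()
results = actions.run_validations(
    ['fake.yaml'],                 # placeholder playbook list
    'tmp/inventory.yaml',          # placeholder inventory path
    validations_dir='/usr/share/ansible/validation-playbooks',
)
print(results)
```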