Merge "Filter get_all_log_file by extension"

This commit is contained in:
Zuul 2020-06-15 20:16:02 +00:00 committed by Gerrit Code Review
commit e1caff945f
4 changed files with 72 additions and 10 deletions

View File

@@ -206,3 +206,21 @@ class TestValidationLog(TestCase):
ValidationLog, ValidationLog,
logfile='non-existing.yaml' logfile='non-existing.yaml'
) )
@mock.patch('json.load')
@mock.patch('six.moves.builtins.open')
def test_log_bad_json(self, mock_open, mock_json):
    """Loading a log file containing invalid JSON raises ValueError."""
    # json.load is patched to blow up, simulating a corrupt log file.
    mock_json.side_effect = ValueError()
    with self.assertRaises(ValueError):
        ValidationLog(logfile='bad.json')
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_is_valid_format(self, mock_open, mock_json):
    """A well-formed validation log is reported as valid."""
    logfile = '/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'
    vlog = ValidationLog(logfile=logfile)
    self.assertTrue(vlog.is_valid_format())

View File

@@ -131,6 +131,38 @@ class TestValidationLogs(TestCase):
self.assertEquals(log, self.assertEquals(log,
['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json']) ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
@mock.patch('os.path.isfile')
@mock.patch('os.listdir')
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_get_all_logfiles_yaml(self, mock_open, mock_json,
                               mock_listdir, mock_isfile):
    """get_all_logfiles(extension='yaml') returns only the .yaml entry.

    The directory listing carries one .json and one .yaml file;
    filtering on the 'yaml' extension must drop the .json file.
    """
    mock_listdir.return_value = \
        ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json',
         '/tmp/123_foo_2020-03-30T13:17:22.447857Z.yaml']
    mock_isfile.return_value = True
    vlogs = ValidationLogs('/tmp/foo')
    log = vlogs.get_all_logfiles(extension='yaml')
    # assertEqual: assertEquals is a deprecated alias (removed in 3.12)
    self.assertEqual(log,
                     ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.yaml'])
@mock.patch('os.path.isfile')
@mock.patch('os.listdir')
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_get_all_logfiles_bad_name(self, mock_open, mock_json,
                                   mock_listdir, mock_isfile):
    """Files whose extension does not match are excluded.

    '/tmp/fooo_json.py' only contains 'json' in its basename, not in
    its extension, so the default ('json') filter must skip it.
    """
    mock_listdir.return_value = \
        ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json',
         '/tmp/fooo_json.py']
    mock_isfile.return_value = True
    vlogs = ValidationLogs('/tmp/foo')
    log = vlogs.get_all_logfiles()
    # assertEqual: assertEquals is a deprecated alias (removed in 3.12)
    self.assertEqual(log,
                     ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
@mock.patch('os.path.isfile') @mock.patch('os.path.isfile')
@mock.patch('os.listdir') @mock.patch('os.listdir')
@mock.patch('json.load', @mock.patch('json.load',

View File

@@ -62,11 +62,13 @@ class ValidationActions(object):
group=None, extra_vars=None, validations_dir=None, group=None, extra_vars=None, validations_dir=None,
validation_name=None, extra_env_vars=None, validation_name=None, extra_env_vars=None,
ansible_cfg=None, quiet=True, workdir=None, ansible_cfg=None, quiet=True, workdir=None,
limit_hosts=None, run_async=False): limit_hosts=None, run_async=False,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR):
self.log = logging.getLogger(__name__ + ".run_validations") self.log = logging.getLogger(__name__ + ".run_validations")
playbooks = [] playbooks = []
validations_dir = (validations_dir if validations_dir validations_dir = (validations_dir if validations_dir
else self.validation_path) else self.validation_path)
if playbook: if playbook:
if isinstance(playbook, list): if isinstance(playbook, list):
playbooks = playbook playbooks = playbook
@@ -104,6 +106,7 @@ class ValidationActions(object):
_playbook, _rc, _status = run_ansible.run( _playbook, _rc, _status = run_ansible.run(
workdir=artifacts_dir, workdir=artifacts_dir,
playbook=playbook, playbook=playbook,
base_dir=base_dir,
playbook_dir=validations_dir, playbook_dir=validations_dir,
parallel_run=True, parallel_run=True,
inventory=inventory, inventory=inventory,
@@ -155,11 +158,11 @@ class ValidationActions(object):
f.write(params) f.write(params)
return params return params
def show_history(self, validation_id=None): def show_history(self, validation_id=None, extension='json'):
"""Return validations history""" """Return validations history"""
vlogs = ValidationLogs() vlogs = ValidationLogs(self.validation_path)
logs = (vlogs.get_logfile_by_validation(validation_id) logs = (vlogs.get_logfile_by_validation(validation_id)
if validation_id else vlogs.get_all_logfiles()) if validation_id else vlogs.get_all_logfiles(extension))
values = [] values = []
column_name = ('UUID', 'Validations', column_name = ('UUID', 'Validations',
@@ -168,8 +171,8 @@ class ValidationActions(object):
for log in logs: for log in logs:
vlog = ValidationLog(logfile=log) vlog = ValidationLog(logfile=log)
values.append((vlog.get_uuid, vlog.validation_id, if vlog.is_valid_format():
vlog.get_status, vlog.get_start_time, values.append((vlog.get_uuid, vlog.validation_id,
vlog.get_duration)) vlog.get_status, vlog.get_start_time,
vlog.get_duration))
return (column_name, values) return (column_name, values)

View File

@@ -57,6 +57,9 @@ class ValidationLog(object):
except IOError: except IOError:
msg = "log file: {} not found".format(file) msg = "log file: {} not found".format(file)
raise IOError(msg) raise IOError(msg)
except ValueError:
msg = "bad json format for {}".format(file)
raise ValueError(msg)
def get_log_path(self): def get_log_path(self):
"""Return full path of a validation log""" """Return full path of a validation log"""
@@ -65,6 +68,11 @@ class ValidationLog(object):
self.uuid, self.validation_id, self.uuid, self.validation_id,
self.extension))[0] self.extension))[0]
def is_valid_format(self):
    """ Return True if the log file is a valid validation format """
    # A log counts as valid when its content shares at least one key
    # with the set an ansible validation run is expected to produce.
    return bool({'stats', 'validation_output', 'plays'} &
                set(self.content.keys()))
@property @property
def get_logfile_infos(self): def get_logfile_infos(self):
""" """
@@ -186,10 +194,11 @@ class ValidationLogs(object):
validation_id)) validation_id))
return [self._get_content(log) for log in log_files] return [self._get_content(log) for log in log_files]
def get_all_logfiles(self): def get_all_logfiles(self, extension='json'):
"""Return logfiles from logs_path""" """Return logfiles from logs_path"""
return [join(self.logs_path, f) for f in os.listdir(self.logs_path) if return [join(self.logs_path, f) for f in os.listdir(self.logs_path) if
os.path.isfile(join(self.logs_path, f))] os.path.isfile(join(self.logs_path, f)) and
extension in os.path.splitext(join(self.logs_path, f))[1]]
def get_all_logfiles_content(self): def get_all_logfiles_content(self):
"""Return logfiles content filter by uuid and content""" """Return logfiles content filter by uuid and content"""