Merge "Filter get_all_logfiles by extension"
This commit is contained in:
commit
e1caff945f
@ -206,3 +206,21 @@ class TestValidationLog(TestCase):
|
||||
ValidationLog,
|
||||
logfile='non-existing.yaml'
|
||||
)
|
||||
|
||||
@mock.patch('json.load')
@mock.patch('six.moves.builtins.open')
def test_log_bad_json(self, mock_open, mock_json):
    """Building a ValidationLog from malformed JSON raises ValueError."""
    # Simulate json.load choking on the file content.
    mock_json.side_effect = ValueError()
    with self.assertRaises(ValueError):
        ValidationLog(logfile='bad.json')
|
||||
|
||||
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_is_valid_format(self, mock_open, mock_json):
    """A logfile whose content has the expected keys is reported valid."""
    log_path = '/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'
    vlog = ValidationLog(logfile=log_path)
    self.assertTrue(vlog.is_valid_format())
|
||||
|
@ -131,6 +131,38 @@ class TestValidationLogs(TestCase):
|
||||
self.assertEquals(log,
|
||||
['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
|
||||
|
||||
@mock.patch('os.path.isfile')
@mock.patch('os.listdir')
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_get_all_logfiles_yaml(self, mock_open, mock_json,
                               mock_listdir, mock_isfile):
    """get_all_logfiles(extension='yaml') keeps only the .yaml log."""
    mock_listdir.return_value = \
        ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json',
         '/tmp/123_foo_2020-03-30T13:17:22.447857Z.yaml']
    mock_isfile.return_value = True
    vlogs = ValidationLogs('/tmp/foo')
    log = vlogs.get_all_logfiles(extension='yaml')
    # assertEqual: assertEquals is a deprecated alias in unittest.
    self.assertEqual(log,
                     ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.yaml'])
|
||||
|
||||
@mock.patch('os.path.isfile')
@mock.patch('os.listdir')
@mock.patch('json.load',
            return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST[0])
@mock.patch('six.moves.builtins.open')
def test_get_all_logfiles_bad_name(self, mock_open, mock_json,
                                   mock_listdir, mock_isfile):
    """Files without the requested extension are filtered out."""
    mock_listdir.return_value = \
        ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json',
         '/tmp/fooo_json.py']
    mock_isfile.return_value = True
    vlogs = ValidationLogs('/tmp/foo')
    log = vlogs.get_all_logfiles()
    # assertEqual: assertEquals is a deprecated alias in unittest.
    self.assertEqual(log,
                     ['/tmp/123_foo_2020-03-30T13:17:22.447857Z.json'])
|
||||
|
||||
@mock.patch('os.path.isfile')
|
||||
@mock.patch('os.listdir')
|
||||
@mock.patch('json.load',
|
||||
|
@ -62,11 +62,13 @@ class ValidationActions(object):
|
||||
group=None, extra_vars=None, validations_dir=None,
|
||||
validation_name=None, extra_env_vars=None,
|
||||
ansible_cfg=None, quiet=True, workdir=None,
|
||||
limit_hosts=None, run_async=False):
|
||||
limit_hosts=None, run_async=False,
|
||||
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR):
|
||||
self.log = logging.getLogger(__name__ + ".run_validations")
|
||||
playbooks = []
|
||||
validations_dir = (validations_dir if validations_dir
|
||||
else self.validation_path)
|
||||
|
||||
if playbook:
|
||||
if isinstance(playbook, list):
|
||||
playbooks = playbook
|
||||
@ -104,6 +106,7 @@ class ValidationActions(object):
|
||||
_playbook, _rc, _status = run_ansible.run(
|
||||
workdir=artifacts_dir,
|
||||
playbook=playbook,
|
||||
base_dir=base_dir,
|
||||
playbook_dir=validations_dir,
|
||||
parallel_run=True,
|
||||
inventory=inventory,
|
||||
@ -155,11 +158,11 @@ class ValidationActions(object):
|
||||
f.write(params)
|
||||
return params
|
||||
|
||||
def show_history(self, validation_id=None):
|
||||
def show_history(self, validation_id=None, extension='json'):
|
||||
"""Return validations history"""
|
||||
vlogs = ValidationLogs()
|
||||
vlogs = ValidationLogs(self.validation_path)
|
||||
logs = (vlogs.get_logfile_by_validation(validation_id)
|
||||
if validation_id else vlogs.get_all_logfiles())
|
||||
if validation_id else vlogs.get_all_logfiles(extension))
|
||||
|
||||
values = []
|
||||
column_name = ('UUID', 'Validations',
|
||||
@ -168,8 +171,8 @@ class ValidationActions(object):
|
||||
|
||||
for log in logs:
|
||||
vlog = ValidationLog(logfile=log)
|
||||
values.append((vlog.get_uuid, vlog.validation_id,
|
||||
vlog.get_status, vlog.get_start_time,
|
||||
vlog.get_duration))
|
||||
|
||||
if vlog.is_valid_format():
|
||||
values.append((vlog.get_uuid, vlog.validation_id,
|
||||
vlog.get_status, vlog.get_start_time,
|
||||
vlog.get_duration))
|
||||
return (column_name, values)
|
||||
|
@ -57,6 +57,9 @@ class ValidationLog(object):
|
||||
except IOError:
|
||||
msg = "log file: {} not found".format(file)
|
||||
raise IOError(msg)
|
||||
except ValueError:
|
||||
msg = "bad json format for {}".format(file)
|
||||
raise ValueError(msg)
|
||||
|
||||
def get_log_path(self):
|
||||
"""Return full path of a validation log"""
|
||||
@ -65,6 +68,11 @@ class ValidationLog(object):
|
||||
self.uuid, self.validation_id,
|
||||
self.extension))[0]
|
||||
|
||||
def is_valid_format(self):
    """Check whether the parsed log content looks like a validation log.

    The content is considered valid when at least one of the expected
    top-level validation keys is present.
    """
    expected_keys = ('stats', 'validation_output', 'plays')
    return any(key in self.content for key in expected_keys)
|
||||
|
||||
@property
|
||||
def get_logfile_infos(self):
|
||||
"""
|
||||
@ -186,10 +194,11 @@ class ValidationLogs(object):
|
||||
validation_id))
|
||||
return [self._get_content(log) for log in log_files]
|
||||
|
||||
def get_all_logfiles(self, extension='json'):
    """Return the log files found in the logs directory.

    :param extension: only files with this extension are returned,
                      with or without a leading dot (default: 'json')
    :return: list of full paths of the matching log files
    """
    # Normalize so both 'json' and '.json' are accepted.
    wanted = extension.lstrip('.')
    logfiles = []
    for filename in os.listdir(self.logs_path):
        # Hoist the join: the original recomputed it three times.
        path = join(self.logs_path, filename)
        # Exact extension match: a plain substring test ('json' in ext)
        # would also accept names like 'foo.notjson'.
        if os.path.isfile(path) and \
                os.path.splitext(path)[1].lstrip('.') == wanted:
            logfiles.append(path)
    return logfiles
|
||||
|
||||
def get_all_logfiles_content(self):
|
||||
"""Return logfiles content filter by uuid and content"""
|
||||
|
Loading…
Reference in New Issue
Block a user