Add validation log object representation
Create two classes, Log and Logs, in order to get a simpler representation of the validation log and provide an easy way to manage the log data information. Change-Id: I743355cef943e43492264d4c4700bebbeab9a37d
This commit is contained in:
parent
fd95a175b7
commit
bcd72301f6
@ -18,12 +18,14 @@ from unittest import TestCase
|
||||
|
||||
from validations_libs import utils
|
||||
from validations_libs.tests import fakes
|
||||
from validations_libs.validation_logs import ValidationLogs
|
||||
|
||||
|
||||
class TestUtils(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestUtils, self).setUp()
|
||||
self.vlog = ValidationLogs()
|
||||
|
||||
@mock.patch('validations_libs.validation.Validation._get_content',
|
||||
return_value=fakes.FAKE_PLAYBOOK)
|
||||
@ -36,7 +38,8 @@ class TestUtils(TestCase):
|
||||
self.assertEqual(res, output)
|
||||
|
||||
def test_get_validations_stats(self):
|
||||
res = utils.get_validations_stats(fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
|
||||
res = self.vlog.get_validations_stats(
|
||||
fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
|
||||
self.assertEqual(res, fakes.VALIDATIONS_STATS)
|
||||
|
||||
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
|
||||
|
@ -25,12 +25,13 @@ class TestValidatorRun(TestCase):
|
||||
def setUp(self):
|
||||
super(TestValidatorRun, self).setUp()
|
||||
|
||||
@mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
|
||||
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
|
||||
@mock.patch('validations_libs.ansible.Ansible.run')
|
||||
@mock.patch('validations_libs.utils.create_artifacts_dir',
|
||||
return_value=('1234', '/tmp/'))
|
||||
def test_validation_run_success(self, mock_tmp, mock_ansible_run,
|
||||
mock_validation_dir):
|
||||
mock_validation_dir, mock_results):
|
||||
mock_validation_dir.return_value = [{
|
||||
'description': 'My Validation One Description',
|
||||
'groups': ['prep', 'pre-deployment'],
|
||||
@ -39,15 +40,20 @@ class TestValidatorRun(TestCase):
|
||||
'parameters': {}}]
|
||||
mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')
|
||||
|
||||
expected_run_return = [
|
||||
{'validation': {'playbook': 'foo.yaml',
|
||||
'rc_code': 0,
|
||||
'status': 'successful',
|
||||
'validation_id': '1234'}},
|
||||
{'validation': {'playbook': 'foo.yaml',
|
||||
'rc_code': 0,
|
||||
'status': 'successful',
|
||||
'validation_id': '1234'}}]
|
||||
mock_results.return_value = [{'Duration': '0:00:01.761',
|
||||
'Host_Group': 'overcloud',
|
||||
'Status': 'PASSED',
|
||||
'Status_by_Host': 'subnode-1,PASSED',
|
||||
'UUID': 'foo',
|
||||
'Unreachable_Hosts': '',
|
||||
'Validations': 'ntp'}]
|
||||
expected_run_return = [{'Duration': '0:00:01.761',
|
||||
'Host_Group': 'overcloud',
|
||||
'Status': 'PASSED',
|
||||
'Status_by_Host': 'subnode-1,PASSED',
|
||||
'UUID': 'foo',
|
||||
'Unreachable_Hosts': '',
|
||||
'Validations': 'ntp'}]
|
||||
|
||||
playbook = ['fake.yaml']
|
||||
inventory = 'tmp/inventory.yaml'
|
||||
@ -58,12 +64,13 @@ class TestValidatorRun(TestCase):
|
||||
validations_dir='/tmp/foo')
|
||||
self.assertEqual(run_return, expected_run_return)
|
||||
|
||||
@mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
|
||||
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
|
||||
@mock.patch('validations_libs.ansible.Ansible.run')
|
||||
@mock.patch('validations_libs.utils.create_artifacts_dir',
|
||||
return_value=('1234', '/tmp/'))
|
||||
def test_validation_run_failed(self, mock_tmp, mock_ansible_run,
|
||||
mock_validation_dir):
|
||||
mock_validation_dir, mock_results):
|
||||
mock_validation_dir.return_value = [{
|
||||
'description': 'My Validation One Description',
|
||||
'groups': ['prep', 'pre-deployment'],
|
||||
@ -71,16 +78,20 @@ class TestValidatorRun(TestCase):
|
||||
'name': 'My Validition One Name',
|
||||
'parameters': {}}]
|
||||
mock_ansible_run.return_value = ('foo.yaml', 0, 'failed')
|
||||
|
||||
expected_run_return = [
|
||||
{'validation': {'playbook': 'foo.yaml',
|
||||
'rc_code': 0,
|
||||
'status': 'failed',
|
||||
'validation_id': '1234'}},
|
||||
{'validation': {'playbook': 'foo.yaml',
|
||||
'rc_code': 0,
|
||||
'status': 'failed',
|
||||
'validation_id': '1234'}}]
|
||||
mock_results.return_value = [{'Duration': '0:00:01.761',
|
||||
'Host_Group': 'overcloud',
|
||||
'Status': 'PASSED',
|
||||
'Status_by_Host': 'subnode-1,PASSED',
|
||||
'UUID': 'foo',
|
||||
'Unreachable_Hosts': '',
|
||||
'Validations': 'ntp'}]
|
||||
expected_run_return = [{'Duration': '0:00:01.761',
|
||||
'Host_Group': 'overcloud',
|
||||
'Status': 'PASSED',
|
||||
'Status_by_Host': 'subnode-1,PASSED',
|
||||
'UUID': 'foo',
|
||||
'Unreachable_Hosts': '',
|
||||
'Validations': 'ntp'}]
|
||||
|
||||
playbook = ['fake.yaml']
|
||||
inventory = 'tmp/inventory.yaml'
|
||||
|
@ -29,7 +29,8 @@ class TestValidatorShow(TestCase):
|
||||
return_value=fakes.VALIDATIONS_LIST)
|
||||
@mock.patch('validations_libs.validation.Validation._get_content',
|
||||
return_value=fakes.FAKE_PLAYBOOK)
|
||||
@mock.patch('validations_libs.utils.parse_all_validations_logs_on_disk',
|
||||
@mock.patch('validations_libs.validation_logs.ValidationLogs.'
|
||||
'get_all_logfiles_content',
|
||||
return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
|
||||
@mock.patch('six.moves.builtins.open')
|
||||
def test_validation_show(self, mock_open, mock_parse_validation, mock_data,
|
||||
|
@ -14,11 +14,9 @@
|
||||
#
|
||||
import datetime
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import time
|
||||
|
||||
from validations_libs import constants
|
||||
from validations_libs.group import Group
|
||||
@ -56,9 +54,7 @@ def parse_all_validations_on_disk(path, groups=None):
|
||||
results = []
|
||||
validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
|
||||
if isinstance(groups, six.string_types):
|
||||
group_list = []
|
||||
group_list.append(groups)
|
||||
groups = group_list
|
||||
groups = [groups]
|
||||
|
||||
for pl in validations_abspath:
|
||||
val = Validation(pl)
|
||||
@ -86,37 +82,6 @@ def get_validation_group_name_list(groups_path=None):
|
||||
return gp.get_groups_keys_list
|
||||
|
||||
|
||||
def get_new_validations_logs_on_disk(validations_logs_dir):
|
||||
"""Return a list of new log execution filenames """
|
||||
files = []
|
||||
|
||||
for root, dirs, filenames in os.walk(validations_logs_dir):
|
||||
files = [
|
||||
f for f in filenames if not f.startswith('processed')
|
||||
and os.path.splitext(f)[1] == '.json'
|
||||
]
|
||||
return files
|
||||
|
||||
|
||||
def parse_all_validations_logs_on_disk(uuid_run=None, validation_id=None):
|
||||
results = []
|
||||
path = constants.VALIDATIONS_LOG_BASEDIR
|
||||
logfile = "{}/*.json".format(path)
|
||||
|
||||
if validation_id:
|
||||
logfile = "{}/*_{}_*.json".format(path, validation_id)
|
||||
if uuid_run:
|
||||
logfile = "{}/*_{}_*.json".format(path, uuid_run)
|
||||
|
||||
logfiles_path = glob.glob(logfile)
|
||||
|
||||
for logfile_path in logfiles_path:
|
||||
with open(logfile_path, 'r') as log:
|
||||
contents = json.load(log)
|
||||
results.append(contents)
|
||||
return results
|
||||
|
||||
|
||||
def get_validations_details(validation):
|
||||
"""Return validations information"""
|
||||
results = parse_all_validations_on_disk(constants.ANSIBLE_VALIDATION_DIR)
|
||||
@ -134,39 +99,6 @@ def get_validations_data(validation):
|
||||
return Validation(validation).get_formated_data
|
||||
|
||||
|
||||
def get_validations_stats(log):
|
||||
"""Return validations stats from a log file"""
|
||||
# Get validation stats
|
||||
total_number = len(log)
|
||||
failed_number = 0
|
||||
passed_number = 0
|
||||
last_execution = None
|
||||
dates = []
|
||||
|
||||
for l in log:
|
||||
if l.get('validation_output'):
|
||||
failed_number += 1
|
||||
else:
|
||||
passed_number += 1
|
||||
|
||||
date_time = \
|
||||
l['plays'][0]['play']['duration'].get('start').split('T')
|
||||
date_start = date_time[0]
|
||||
time_start = date_time[1].split('Z')[0]
|
||||
newdate = \
|
||||
time.strptime(date_start + time_start, '%Y-%m-%d%H:%M:%S.%f')
|
||||
dates.append(newdate)
|
||||
|
||||
if dates:
|
||||
last_execution = time.strftime('%Y-%m-%d %H:%M:%S', max(dates))
|
||||
|
||||
return {"Last execution date": last_execution,
|
||||
"Number of execution": "Total: {}, Passed: {}, "
|
||||
"Failed: {}".format(total_number,
|
||||
passed_number,
|
||||
failed_number)}
|
||||
|
||||
|
||||
def get_validations_parameters(validations_data, validation_name=[],
|
||||
groups=[]):
|
||||
params = {}
|
||||
|
@ -18,6 +18,7 @@ import os
|
||||
|
||||
from validations_libs.ansible import Ansible as v_ansible
|
||||
from validations_libs.group import Group
|
||||
from validations_libs.validation_logs import ValidationLogs
|
||||
from validations_libs import constants
|
||||
from validations_libs import utils as v_utils
|
||||
|
||||
@ -46,13 +47,15 @@ class ValidationActions(object):
|
||||
val.get('groups')))
|
||||
return (column_name, return_values)
|
||||
|
||||
def show_validations(self, validation):
|
||||
def show_validations(self, validation,
|
||||
log_path=constants.VALIDATIONS_LOG_BASEDIR):
|
||||
"""Display detailed information about a Validation"""
|
||||
self.log = logging.getLogger(__name__ + ".show_validations")
|
||||
# Get validation data:
|
||||
vlog = ValidationLogs(log_path)
|
||||
data = v_utils.get_validations_data(validation)
|
||||
format = v_utils.get_validations_stats(
|
||||
v_utils.parse_all_validations_logs_on_disk())
|
||||
logfiles = vlog.get_all_logfiles_content()
|
||||
format = vlog.get_validations_stats(logfiles)
|
||||
data.update(format)
|
||||
return data
|
||||
|
||||
@ -119,9 +122,11 @@ class ValidationActions(object):
|
||||
'playbook': _playbook,
|
||||
'rc_code': _rc,
|
||||
'status': _status,
|
||||
'validation_id': validation_uuid
|
||||
'validation_id': _playbook.split('.')[0]
|
||||
}})
|
||||
return results
|
||||
# Return log results
|
||||
vlog = ValidationLogs()
|
||||
return vlog.get_results(validation_uuid)
|
||||
|
||||
def group_information(self, groups):
|
||||
"""Get Information about Validation Groups"""
|
||||
|
224
validations_libs/validation_logs.py
Normal file
224
validations_libs/validation_logs.py
Normal file
@ -0,0 +1,224 @@
|
||||
# Copyright 2020 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import yaml
|
||||
from os import listdir
|
||||
from os.path import isfile, join
|
||||
|
||||
from validations_libs import constants
|
||||
|
||||
LOG = logging.getLogger(__name__ + ".validation_logs")
|
||||
|
||||
|
||||
class ValidationLog(object):
    """Represent a single validation Ansible log file.

    The log file name is expected to follow the pattern
    ``<uuid>_<validation_id>_<datetime>.<extension>`` inside
    ``log_path``.
    """

    def __init__(self, uuid=None, validation_id=None, logfile=None,
                 log_path=constants.VALIDATIONS_LOG_BASEDIR,
                 extension='json'):
        """Load a validation log.

        Either pass an explicit ``logfile`` path, or pass ``uuid`` and
        ``validation_id`` so the file is located under ``log_path``.

        :param uuid: execution UUID, used to locate the file on disk
        :param validation_id: validation name, used to locate the file
        :param logfile: explicit path to a log file; when given, uuid,
                        validation_id and datetime are parsed back from
                        the file name
        :param log_path: directory holding the validation logs
        :param extension: log file extension (default: json)
        """
        self.uuid = uuid
        self.validation_id = validation_id
        self.log_path = log_path
        self.extension = extension
        # Resolve the file location, then cache its parsed content and
        # its base name (extension already stripped by splitext).
        full_path = logfile if logfile else self.get_log_path()
        self.content = self._get_content(full_path)
        self.name = os.path.splitext(os.path.basename(full_path))[0]
        if logfile:
            # Recover the identifiers encoded in the file name
            # (assumes exactly two underscores in the name).
            self.uuid, self.validation_id, self.datetime = \
                self.name.replace('.{}'.format(self.extension), '').split('_')

    def _get_content(self, file):
        """Parse the log file and return its content."""
        with open(file, 'r') as log_file:
            return yaml.safe_load(log_file)

    def get_log_path(self):
        """Return full path of a validation log.

        Occurrence 0 is returned because the file name is expected to
        be unique for a given uuid/validation_id pair.
        """
        pattern = "{}/{}_{}_*.{}".format(self.log_path, self.uuid,
                                         self.validation_id,
                                         self.extension)
        return glob.glob(pattern)[0]

    @property
    def get_logfile_infos(self):
        """Return log file information.

        A list of the form ``[uuid, validation_id, datetime]`` parsed
        from the file name.
        """
        return self.name.replace('.{}'.format(self.extension), '').split('_')

    @property
    def get_logfile_datetime(self):
        """Return log file datetime from a UUID and a validation ID"""
        return self.name.replace('.{}'.format(self.extension),
                                 '').split('_')[2]

    @property
    def get_logfile_content(self):
        """Return logfile content as a dict"""
        return self.content

    @property
    def get_uuid(self):
        """Return log uuid"""
        return self.uuid

    @property
    def get_validation_id(self):
        """Return validation id"""
        return self.validation_id

    @property
    def get_status(self):
        """Return 'FAILED' if any host reported failures, else 'PASSED'."""
        stats = self.content['stats']
        has_failure = any(stats[host].get('failures')
                          for host in stats.keys())
        return 'FAILED' if has_failure else 'PASSED'

    @property
    def get_host_group(self):
        """Return a comma-separated list of the play host groups."""
        return ', '.join([play['play'].get('host')
                          for play in self.content['plays']])

    @property
    def get_hosts_status(self):
        """Return per-host status as 'host,FAILED' / 'host,PASSED' pairs."""
        stats = self.content['stats']
        return ', '.join(
            '{},{}'.format(host,
                           'FAILED' if stats[host].get('failures')
                           else 'PASSED')
            for host in stats.keys())

    @property
    def get_unreachable_hosts(self):
        """Return a comma-separated list of unreachable hosts."""
        stats = self.content['stats']
        return ', '.join(host for host in stats.keys()
                         if stats[host].get('unreachable'))

    @property
    def get_duration(self):
        """Return the Ansible runtime duration of each play."""
        return ', '.join([play['play']['duration'].get('time_elapsed')
                          for play in self.content['plays']])
|
||||
|
||||
class ValidationLogs(object):
    """Manage the validation log files stored in ``logs_path``.

    Provides lookups of log files by execution uuid and/or validation
    id, plus aggregated statistics and results over those logs.
    """

    def __init__(self, logs_path=constants.VALIDATIONS_LOG_BASEDIR):
        """:param logs_path: directory containing the validation logs"""
        self.logs_path = logs_path

    def _get_content(self, file):
        """Parse a log file and return its first document.

        NOTE(review): unlike ValidationLog._get_content this indexes
        ``[0]`` — it assumes each log file holds a one-element list;
        confirm against the log writer.
        """
        with open(file, 'r') as log_file:
            return yaml.safe_load(log_file)[0]

    def get_logfile_by_uuid(self, uuid):
        """Return logfile paths matching an execution uuid"""
        return glob.glob("{}/{}_*".format(self.logs_path, uuid))

    def get_logfile_content_by_uuid(self, uuid):
        """Return logfiles content by uuid"""
        log_files = glob.glob("{}/{}_*".format(self.logs_path, uuid))
        return [self._get_content(log_file) for log_file in log_files]

    def get_logfile_by_uuid_validation_id(self, uuid, validation_id):
        """Return logfile paths matching a uuid and a validation id"""
        return glob.glob("{}/{}_{}_*".format(self.logs_path, uuid,
                                             validation_id))

    def get_logfile_content_by_uuid_validation_id(self, uuid, validation_id):
        """Return logfiles content filtered by uuid and validation id"""
        log_files = glob.glob("{}/{}_{}_*".format(self.logs_path, uuid,
                                                  validation_id))
        return [self._get_content(log_file) for log_file in log_files]

    def get_all_logfiles(self):
        """Return all logfile names found in logs_path"""
        return [f for f in listdir(self.logs_path) if
                isfile(join(self.logs_path, f))]

    def get_all_logfiles_content(self):
        """Return the parsed content of every logfile in logs_path"""
        # Bugfix: listdir() yields bare file names; they must be joined
        # with logs_path before being opened, otherwise the open() in
        # _get_content only works when cwd happens to be logs_path.
        return [self._get_content(join(self.logs_path, f))
                for f in listdir(self.logs_path)
                if isfile(join(self.logs_path, f))]

    def get_validations_stats(self, logs):
        """Return validations stats from log files

        :param logs: list of log dicts (a single dict is also accepted)
        :return: dict with the last execution date and a summary of
                 total/passed/failed runs
        """
        if not isinstance(logs, list):
            logs = [logs]
        # Get validation stats
        total_number = len(logs)
        failed_number = 0
        passed_number = 0
        last_execution = None
        dates = []
        for log in logs:
            # A non-empty 'validation_output' means the run failed.
            if log.get('validation_output'):
                failed_number += 1
            else:
                passed_number += 1
            # Start timestamp looks like '2020-01-01T00:00:00.000000Z'.
            date_time = \
                log['plays'][0]['play']['duration'].get('start').split('T')
            date_start = date_time[0]
            time_start = date_time[1].split('Z')[0]
            newdate = \
                time.strptime(date_start + time_start, '%Y-%m-%d%H:%M:%S.%f')
            dates.append(newdate)

        if dates:
            last_execution = time.strftime('%Y-%m-%d %H:%M:%S', max(dates))

        return {"Last execution date": last_execution,
                "Number of execution": "Total: {}, Passed: {}, "
                                       "Failed: {}".format(total_number,
                                                           passed_number,
                                                           failed_number)}

    def get_results(self, uuid, validation_id=None):
        """Return a list of validation results by uuid

        Can be filtered by validation_id.
        """
        results = (self.get_logfile_by_uuid_validation_id(uuid,
                                                          validation_id)
                   if validation_id else self.get_logfile_by_uuid(uuid))
        res = []
        for result in results:
            vlog = ValidationLog(logfile=result)
            # Bugfix: build a fresh dict for every log file. The
            # original created one dict before the loop and appended
            # that same object each iteration, so every entry in the
            # returned list referenced the last log's values.
            data = {'UUID': vlog.get_uuid,
                    'Validations': vlog.get_validation_id,
                    'Status': vlog.get_status,
                    'Host_Group': vlog.get_host_group,
                    'Status_by_Host': vlog.get_hosts_status,
                    'Unreachable_Hosts': vlog.get_unreachable_hosts,
                    'Duration': vlog.get_duration}
            res.append(data)
        return res
|
Loading…
Reference in New Issue
Block a user