Merge "TripleO Validations Logging CLI Introduction"

commit 88986bd7d3
Zuul 2020-02-29 02:41:35 +00:00, committed by Gerrit Code Review
6 changed files with 475 additions and 16 deletions


@@ -0,0 +1,7 @@
---
features:
- |
The TripleO Validator CLI now has a logging feature which keeps every
validation execution log on the Undercloud (/var/log/validations/). The CLI
is able to display the execution history and lets the user retrieve the full
details of any run.
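As a quick illustration (not part of the commit): a minimal sketch of reading the stored logs back, assuming only the /var/log/validations/ JSON layout introduced below; the helper name load_validation_logs is invented for this example. The real CLI exposes the same data through the new show history and show run commands wired up in setup.cfg below.

# Minimal sketch: read every stored validation log back from the undercloud.
# Assumption: logs are JSON files under /var/log/validations/, as introduced
# by this commit; load_validation_logs is an invented name for this example.
import glob
import json
import os

VALIDATIONS_LOG_BASEDIR = '/var/log/validations'


def load_validation_logs(basedir=VALIDATIONS_LOG_BASEDIR):
    """Return the parsed contents of every JSON execution log on disk."""
    logs = []
    for logfile in glob.glob(os.path.join(basedir, '*.json')):
        with open(logfile, 'r') as fp:
            logs.append(json.load(fp))
    return logs


if __name__ == '__main__':
    for run in load_validation_logs():
        # Each log carries 'plays', 'stats' and 'validation_output', as shown
        # by the test fixture later in this commit.
        print(run['plays'][0]['play'].get('validation_id'))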


@@ -122,7 +122,9 @@ openstack.tripleoclient.v2 =
tripleo_validator_list = tripleoclient.v1.tripleo_validator:TripleOValidatorList
tripleo_validator_run = tripleoclient.v1.tripleo_validator:TripleOValidatorRun
tripleo_validator_show = tripleoclient.v1.tripleo_validator:TripleOValidatorShow
tripleo_validator_show_history = tripleoclient.v1.tripleo_validator:TripleOValidatorShowHistory
tripleo_validator_show_parameter = tripleoclient.v1.tripleo_validator:TripleOValidatorShowParameter
tripleo_validator_show_run = tripleoclient.v1.tripleo_validator:TripleOValidatorShowRun
oslo.config.opts =
undercloud_config = tripleoclient.config.undercloud:list_opts


@@ -79,10 +79,14 @@ ENABLE_SSH_ADMIN_SSH_PORT_TIMEOUT = 600
ADDITIONAL_ARCHITECTURES = ['ppc64le']
DEFAULT_VALIDATIONS_BASEDIR = '/usr/share/openstack-tripleo-validations'
VALIDATIONS_LOG_BASEDIR = '/var/log/validations'
DEFAULT_WORK_DIR = '/var/lib/mistral'
ANSIBLE_INVENTORY = \
'/var/lib/mistral/overcloud/tripleo-ansible-inventory.yaml'
ANSIBLE_VALIDATION_DIR = \
'/usr/share/openstack-tripleo-validations/playbooks'
ANSIBLE_TRIPLEO_PLAYBOOKS = \


@@ -38,6 +38,110 @@ GROUPS_LIST = [
('group3', 'Group3 description'),
]
VALIDATIONS_LOGS_CONTENTS_LIST = [{
'plays': [{
'play': {
'duration': {
'end': '2019-11-25T13:40:17.538611Z',
'start': '2019-11-25T13:40:14.404623Z',
'time_elapsed': '0:00:03.753'
},
'host': 'undercloud',
'id': '008886df-d297-1eaa-2a74-000000000008',
'validation_id': '512e',
'validation_path':
'/usr/share/openstack-tripleo-validations/playbooks'
},
'tasks': [
{
'hosts': {
'undercloud': {
'_ansible_no_log': False,
'action': 'command',
'changed': False,
'cmd': [u'ls', '/sys/class/block/'],
'delta': '0:00:00.018913',
'end': '2019-11-25 13:40:17.120368',
'invocation': {
'module_args': {
'_raw_params': 'ls /sys/class/block/',
'_uses_shell': False,
'argv': None,
'chdir': None,
'creates': None,
'executable': None,
'removes': None,
'stdin': None,
'stdin_add_newline': True,
'strip_empty_ends': True,
'warn': True
}
},
'rc': 0,
'start': '2019-11-25 13:40:17.101455',
'stderr': '',
'stderr_lines': [],
'stdout': 'vda',
'stdout_lines': [u'vda']
}
},
'task': {
'duration': {
'end': '2019-11-25T13:40:17.336687Z',
'start': '2019-11-25T13:40:14.529880Z'
},
'id':
'008886df-d297-1eaa-2a74-00000000000d',
'name':
'advanced-format-512e-support : List the available drives'
}
},
{
'hosts': {
'undercloud': {
'action':
'advanced_format',
'changed': False,
'msg':
'All items completed',
'results': [{
'_ansible_item_label': 'vda',
'_ansible_no_log': False,
'ansible_loop_var': 'item',
'changed': False,
'item': 'vda',
'skip_reason': 'Conditional result was False',
'skipped': True
}],
'skipped': True
}
},
'task': {
'duration': {
'end': '2019-11-25T13:40:17.538611Z',
'start': '2019-11-25T13:40:17.341704Z'
},
'id': '008886df-d297-1eaa-2a74-00000000000e',
'name':
'advanced-format-512e-support: Detect the drive'
}
}
]
}],
'stats': {
'undercloud': {
'changed': 0,
'failures': 0,
'ignored': 0,
'ok': 1,
'rescued': 0,
'skipped': 1,
'unreachable': 0
}
},
'validation_output': []
}]
class TestValidatorGroupInfo(utils.TestCommand):
@@ -114,3 +218,56 @@ class TestValidatorShowParameter(utils.TestCommand):
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
class TestValidatorShowRun(utils.TestCommand):
def setUp(self):
super(TestValidatorShowRun, self).setUp()
# Get the command object to test
self.cmd = tripleo_validator.TripleOValidatorShowRun(self.app,
None)
@mock.patch('tripleoclient.utils.parse_all_validations_logs_on_disk',
return_value=VALIDATIONS_LOGS_CONTENTS_LIST)
def test_validation_show_run(self, mock_validations):
arglist = ['008886df-d297-1eaa-2a74-000000000008']
verifylist = [('uuid', '008886df-d297-1eaa-2a74-000000000008')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
class TestValidatorShowHistory(utils.TestCommand):
def setUp(self):
super(TestValidatorShowHistory, self).setUp()
# Get the command object to test
self.cmd = tripleo_validator.TripleOValidatorShowHistory(self.app,
None)
@mock.patch('tripleoclient.utils.parse_all_validations_logs_on_disk',
return_value=VALIDATIONS_LOGS_CONTENTS_LIST)
def test_validation_show_history(self, mock_validations):
arglist = []
verifylist = []
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)
@mock.patch('tripleoclient.utils.parse_all_validations_logs_on_disk',
return_value=VALIDATIONS_LOGS_CONTENTS_LIST)
def test_validation_show_history_for_a_validation(self, mock_validations):
arglist = [
'--validation',
'512e'
]
verifylist = [('validation', '512e')]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
self.cmd.take_action(parsed_args)


@@ -2170,6 +2170,11 @@ def parse_all_validations_on_disk(path, groups=None):
results = []
validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
if isinstance(groups, six.string_types):
group_list = []
group_list.append(groups)
groups = group_list
for pl in validations_abspath:
validation_id, _ext = os.path.splitext(os.path.basename(pl))
@@ -2249,6 +2254,40 @@ def get_validations_yaml(validations_data):
indent=2)
def get_new_validations_logs_on_disk():
"""Return a list of new log execution filenames """
files = []
for root, dirs, filenames in os.walk(constants.VALIDATIONS_LOG_BASEDIR):
files = [
f for f in filenames if not f.startswith('processed')
and os.path.splitext(f)[1] == '.json'
]
return files
def parse_all_validations_logs_on_disk(uuid_run=None, validation_id=None):
results = []
path = constants.VALIDATIONS_LOG_BASEDIR
logfile = "{}/*.json".format(path)
if validation_id:
logfile = "{}/*_{}_*.json".format(path, validation_id)
if uuid_run:
logfile = "{}/*_{}_*.json".format(path, uuid_run)
logfiles_path = glob.glob(logfile)
for logfile_path in logfiles_path:
with open(logfile_path, 'r') as log:
contents = json.load(log)
results.append(contents)
return results
def indent(text):
'''Indent the given text by four spaces.'''
return ''.join(' {}\n'.format(line) for line in text.splitlines())
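A small usage sketch of the two helpers added in this file (illustrative only; it assumes python-tripleoclient with this patch installed and that execution logs already exist on the undercloud):

# Illustrative usage of the helpers added in this hunk (not part of the commit).
from tripleoclient import utils as oooutils

# Every execution log currently stored under /var/log/validations/:
all_runs = oooutils.parse_all_validations_logs_on_disk()

# Only the runs of a given validation (e.g. the '512e' id used in the tests):
runs_512e = oooutils.parse_all_validations_logs_on_disk(validation_id='512e')

# Log filenames that have not yet been renamed with the 'processed' prefix:
new_files = oooutils.get_new_validations_logs_on_disk()

print(len(all_runs), len(runs_512e), new_files)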


@@ -18,12 +18,13 @@ import json
import logging
import os
import six
import sys
import textwrap
import time
from concurrent.futures import ThreadPoolExecutor
from osc_lib import exceptions
from osc_lib.i18n import _
from prettytable import PrettyTable
from tripleoclient import command
from tripleoclient import constants
@@ -31,6 +32,13 @@ from tripleoclient import utils as oooutils
LOG = logging.getLogger(__name__ + ".TripleoValidator")
RED = "\033[1;31m"
GREEN = "\033[0;32m"
RESET = "\033[0;0m"
FAILED_VALIDATION = "{}FAILED{}".format(RED, RESET)
PASSED_VALIDATION = "{}PASSED{}".format(GREEN, RESET)
class _CommaListGroupAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
@@ -64,8 +72,14 @@ class TripleOValidatorGroupInfo(command.Lister):
raise exceptions.CommandError(
"Could not find groups information file %s" % group_file)
column_name = ("Groups", "Description")
return (column_name, group)
group_info = []
for gp in group:
validations = oooutils.parse_all_validations_on_disk(
constants.ANSIBLE_VALIDATION_DIR, gp[0])
group_info.append((gp[0], gp[1], len(validations)))
column_name = ("Groups", "Description", "Number of Validations")
return (column_name, group_info)
class TripleOValidatorShow(command.ShowOne):
@@ -83,11 +97,14 @@ class TripleOValidatorShow(command.ShowOne):
def take_action(self, parsed_args):
validation = self.get_validations_details(parsed_args.validation_id)
logfile_contents = oooutils.parse_all_validations_logs_on_disk(
validation_id=parsed_args.validation_id)
if not validation:
raise exceptions.CommandError(
"Could not find validation %s" % parsed_args.validation_id)
return self.format_validation(validation)
return self.format_validation(validation, logfile_contents)
def get_validations_details(self, validation):
results = oooutils.parse_all_validations_on_disk(
@@ -98,7 +115,7 @@ class TripleOValidatorShow(command.ShowOne):
return r
return []
def format_validation(self, validation):
def format_validation(self, validation, logfile):
column_names = ["ID"]
data = [validation.pop('id')]
@@ -110,12 +127,51 @@ class TripleOValidatorShow(command.ShowOne):
column_names.append("Description")
data.append(textwrap.fill(validation.pop('description')))
if 'groups' in validation:
column_names.append("Groups")
data.append(", ".join(validation.pop('groups')))
other_fields = list(validation.keys())
other_fields.sort()
for field in other_fields:
column_names.append(field.capitalize())
data.append(validation[field])
# history, stats ...
total_number = 0
failed_number = 0
passed_number = 0
last_execution = None
dates = []
if logfile:
total_number = len(logfile)
for run in logfile:
if 'validation_output' in run and run.get('validation_output'):
failed_number += 1
else:
passed_number += 1
date_time = \
run['plays'][0]['play']['duration'].get('start').split('T')
date_start = date_time[0]
time_start = date_time[1].split('Z')[0]
newdate = \
time.strptime(date_start + time_start, '%Y-%m-%d%H:%M:%S.%f')
dates.append(newdate)
if dates:
last_execution = time.strftime('%Y-%m-%d %H:%M:%S', max(dates))
column_names.append("Number of execution")
data.append("Total: {}, Passed: {}, Failed: {}".format(total_number,
passed_number,
failed_number))
column_names.append("Last execution date")
data.append(last_execution)
return column_names, data
@@ -198,7 +254,7 @@ class TripleOValidatorShowParameter(command.Command):
with open(varsfile[-1], 'w') as f:
params = {}
for val_name in data.keys():
for val_name in list(data.keys()):
for k, v in data[val_name].get('parameters').items():
params[k] = v
@@ -273,7 +329,7 @@ class TripleOValidatorList(command.Lister):
for val in validations:
return_values.append((val.get('id'), val.get('name'),
val.get('groups')))
", ".join(val.get('groups'))))
return (column_name, return_values)
except Exception as e:
raise RuntimeError(_("Validations listing finished with errors\n"
@@ -382,12 +438,19 @@ class TripleOValidatorRun(command.Command):
playbooks.append(val.get('id') + '.yaml')
except Exception as e:
print(
_("Validations listing by group finished with errors"))
_("Getting Validations list by group name"
"finished with errors"))
print('Output: {}'.format(e))
else:
for pb in parsed_args.validation_name:
playbooks.append(pb + '.yaml')
if pb not in constants.VALIDATION_GROUPS:
playbooks.append(pb + '.yaml')
else:
raise exceptions.CommandError(
"Please, use '--group' argument instead of "
"'--validation' to run validation(s) by their name(s)."
)
static_inventory = oooutils.get_tripleo_ansible_inventory(
ssh_user='heat-admin',
@@ -409,30 +472,217 @@ class TripleOValidatorRun(command.Command):
playbook_dir=constants.ANSIBLE_VALIDATION_DIR,
parallel_run=True,
inventory=static_inventory,
output_callback='validation_output',
output_callback='validation_json',
quiet=True,
extra_vars=extra_vars_input,
gathering_policy='explicit'): playbook
for playbook in playbooks
}
results = []
for tk, pl in six.iteritems(tasks_exec):
try:
_rc, output = tk.result()
print('[SUCCESS] - {}\n{}'.format(pl, oooutils.indent(output)))
results.append({
'validation': {
'validation_id': pl,
'logfile': None,
'status': 'PASSED',
'output': output
}})
except Exception as e:
failed_val = True
LOG.error('[FAILED] - {}\n{}'.format(
pl, oooutils.indent(e.args[0])))
results.append({
'validation': {
'validation_id': pl,
'logfile': None,
'status': 'FAILED',
'output': str(e)
}})
if results:
new_log_files = oooutils.get_new_validations_logs_on_disk()
for i in new_log_files:
val_id = "{}.yaml".format(i.split('_')[1])
for res in results:
if res['validation'].get('validation_id') == val_id:
res['validation']['logfile'] = \
os.path.join(constants.VALIDATIONS_LOG_BASEDIR, i)
t = PrettyTable(border=True, header=True, padding_width=1)
t.field_names = [
"UUID", "Validations", "Status", "Host Group(s)",
"Status by Host", "Unreachable Host(s)", "Duration"]
for validation in results:
r = []
logfile = validation['validation'].get('logfile', None)
if logfile and os.path.exists(logfile):
with open(logfile, 'r') as val:
contents = json.load(val)
for i in contents['plays']:
host = [
x.encode('utf-8')
for x in i['play'].get('host').split(', ')
]
val_id = i['play'].get('validation_id')
time_elapsed = \
i['play']['duration'].get('time_elapsed', None)
r.append(contents['plays'][0]['play'].get('id'))
r.append(val_id)
if validation['validation'].get('status') == "PASSED":
r.append(PASSED_VALIDATION)
else:
r.append(FAILED_VALIDATION)
unreachable_hosts = []
hosts_result = []
for h in list(contents['stats'].keys()):
ht = h.encode('utf-8')
if contents['stats'][ht]['unreachable'] != 0:
unreachable_hosts.append(ht)
elif contents['stats'][ht]['failures'] != 0:
hosts_result.append("{}{}{}".format(
RED, ht, RESET))
else:
hosts_result.append("{}{}{}".format(
GREEN, ht, RESET))
r.append(", ".join(host))
r.append(", ".join(hosts_result))
r.append("{}{}{}".format(RED,
", ".join(unreachable_hosts),
RESET))
r.append(time_elapsed)
t.add_row(r)
t.sortby = "UUID"
for field in t.field_names:
if field == "Status":
t.align['Status'] = "l"
else:
t.align[field] = "l"
print(t)
if len(new_log_files) > len(results):
LOG.warn(_('Looks like we have more log files than '
'executed validations'))
for i in new_log_files:
os.rename(
"{}/{}".format(constants.VALIDATIONS_LOG_BASEDIR,
i), "{}/processed_{}".format(
constants.VALIDATIONS_LOG_BASEDIR, i))
LOG.debug(_('Removing static tripleo ansible inventory file'))
oooutils.cleanup_tripleo_ansible_inventory_file(
static_inventory)
if failed_val:
LOG.error(_('One or more validations have failed!'))
sys.exit(1)
sys.exit(0)
raise exceptions.CommandError(
_('One or more validations have failed!'))
def take_action(self, parsed_args):
self._run_validator_run(parsed_args)
class TripleOValidatorShowRun(command.Command):
"""Display details about a Validation execution"""
def get_parser(self, prog_name):
parser = argparse.ArgumentParser(
description=self.get_description(),
prog=prog_name,
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
add_help=False
)
parser.add_argument('uuid',
metavar="<uuid>",
type=str,
help='UUID of the validation run')
parser.add_argument('--full',
action='store_true',
help='Show Full Details for the run')
return parser
def take_action(self, parsed_args):
logfile_contents = oooutils.parse_all_validations_logs_on_disk(
uuid_run=parsed_args.uuid)
if len(logfile_contents) > 1:
raise exceptions.CommandError(
"Multiple log files found for UUID: %s" % parsed_args.uuid)
if logfile_contents:
if parsed_args.full:
print(oooutils.get_validations_json(logfile_contents[0]))
else:
for data in logfile_contents:
for tasks in data['validation_output']:
print(oooutils.get_validations_json(tasks))
else:
raise exceptions.CommandError(
"Could not find the log file linked to this UUID: %s" %
parsed_args.uuid)
class TripleOValidatorShowHistory(command.Lister):
"""Display Validations execution history"""
def get_parser(self, prog_name):
parser = super(TripleOValidatorShowHistory, self).get_parser(prog_name)
parser.add_argument('--validation',
metavar="<validation>",
type=str,
help='Display execution history for a validation')
return parser
def take_action(self, parsed_args):
logfile_contents = oooutils.parse_all_validations_logs_on_disk(
validation_id=parsed_args.validation)
if not logfile_contents:
msg = "No History Found"
if parsed_args.validation:
raise exceptions.CommandError(
"{} for {}.".format(
msg, parsed_args.validation))
else:
raise exceptions.CommandError(
"{}.".format(msg, parsed_args.validation))
return_values = []
column_name = ('UUID', 'Validations',
'Status', 'Execution at',
'Duration')
for run in logfile_contents:
status = PASSED_VALIDATION
if 'plays' in run and run.get('plays'):
date_time = \
run['plays'][0]['play']['duration'].get('start').split('T')
time_elapsed = \
run['plays'][0]['play']['duration'].get('time_elapsed')
date_start = date_time[0]
time_start = date_time[1].split('Z')[0]
for k, v in six.iteritems(run['stats']):
if v.get('failures') != 0:
status = FAILED_VALIDATION
return_values.append(
(run['plays'][0]['play'].get('id'),
run['plays'][0]['play'].get('validation_id'), status,
"{} {}".format(date_start, time_start), time_elapsed))
return (column_name, return_values)
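For reference, a simplified, uncolored restatement of the row-building logic in take_action above; it assumes a log dict shaped like the VALIDATIONS_LOGS_CONTENTS_LIST fixture from the tests and is for illustration only.

# Simplified restatement of the history row logic (illustration only,
# plain PASSED/FAILED strings instead of the colored constants above).
def history_row(run):
    """Map one parsed log dict to (uuid, validation, status, start, duration)."""
    play = run['plays'][0]['play']
    date_start, time_start = play['duration']['start'].split('T')
    time_start = time_start.split('Z')[0]
    status = 'PASSED'
    for host_stats in run['stats'].values():
        if host_stats.get('failures'):
            status = 'FAILED'
    return (play.get('id'), play.get('validation_id'), status,
            '{} {}'.format(date_start, time_start),
            play['duration'].get('time_elapsed'))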