# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

import json
import logging
import os

import six
import yaml

from validations_libs.ansible import Ansible as v_ansible
from validations_libs.group import Group
from validations_libs.validation_logs import ValidationLogs, ValidationLog
from validations_libs import constants
from validations_libs import utils as v_utils

LOG = logging.getLogger(__name__ + ".validation_actions")


class ValidationActions(object):
    """Actions to list, show and run the validations."""

    def __init__(self, validation_path=None, group=None):
        self.log = logging.getLogger(__name__ + ".ValidationActions")
        self.validation_path = (validation_path if validation_path
                                else constants.ANSIBLE_VALIDATION_DIR)
        self.group = group
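
    # Usage sketch (illustrative, not part of the module): with no arguments
    # the object falls back to constants.ANSIBLE_VALIDATION_DIR.
    # '/my/validations' and 'pre-deployment' are placeholder values.
    #
    #   actions = ValidationActions()
    #   scoped = ValidationActions(validation_path='/my/validations',
    #                              group='pre-deployment')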
    def list_validations(self):
        """List the available validations"""
        self.log = logging.getLogger(__name__ + ".list_validations")
        validations = v_utils.parse_all_validations_on_disk(
            self.validation_path, self.group)

        return_values = []
        column_name = ('ID', 'Name', 'Groups')

        for val in validations:
            return_values.append((val.get('id'), val.get('name'),
                                  val.get('groups')))
        return (column_name, return_values)
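
    # Usage sketch (illustrative) for list_validations(): the return value is
    # a (column_names, rows) pair, ready for tabulation.
    #
    #   columns, rows = ValidationActions().list_validations()
    #   for vid, name, groups in rows:
    #       print(vid, name, groups)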
    def show_validations(self, validation,
                         log_path=constants.VALIDATIONS_LOG_BASEDIR):
        """Display detailed information about a Validation"""
        self.log = logging.getLogger(__name__ + ".show_validations")
        # Get validation data:
        vlog = ValidationLogs(log_path)
        data = v_utils.get_validations_data(validation, self.validation_path)
        if not data:
            msg = "Validation {} not found in the path: {}".format(
                validation,
                self.validation_path)
            raise RuntimeError(msg)
        logfiles = vlog.get_logfile_content_by_validation(validation)
        stats = vlog.get_validations_stats(logfiles)
        data.update(stats)
        return data
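
    # Usage sketch (illustrative) for show_validations(): merges a
    # validation's metadata with the run statistics computed from its logs.
    # 'check-ram' is a placeholder validation id.
    #
    #   data = ValidationActions().show_validations('check-ram')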
    def run_validations(self, validation_name=None, inventory='localhost',
                        group=None, extra_vars=None, validations_dir=None,
                        extra_env_vars=None, ansible_cfg=None, quiet=True,
                        workdir=None, limit_hosts=None, run_async=False,
                        base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
                        log_path=None, python_interpreter=None):
        """Run one or more validations with Ansible and return the results"""
        self.log = logging.getLogger(__name__ + ".run_validations")
        playbooks = []
        validations_dir = (validations_dir if validations_dir
                           else self.validation_path)
        if group:
            self.log.debug('Getting the validations list by group')
            validations = v_utils.parse_all_validations_on_disk(
                validations_dir, group)
            for val in validations:
                playbooks.append(val.get('id') + '.yaml')
        elif validation_name:
            if isinstance(validation_name, six.string_types):
                validation_name = [validation_name]

            playbooks = v_utils.get_validations_playbook(validations_dir,
                                                         validation_name,
                                                         group)

            if not playbooks or len(validation_name) != len(playbooks):
                found = []
                for play in playbooks:
                    found.append(os.path.basename(os.path.splitext(play)[0]))

                unknown_validation = list(set(validation_name) - set(found))

                msg = "Validation {} not found in {}.".format(
                    unknown_validation, validations_dir)

                raise RuntimeError(msg)
        else:
            raise RuntimeError("No validations found")

        self.log.debug('Running the validations with Ansible')
        results = []
        for playbook in playbooks:
            validation_uuid, artifacts_dir = v_utils.create_artifacts_dir(
                prefix=os.path.basename(playbook))
            run_ansible = v_ansible(validation_uuid)
            _playbook, _rc, _status = run_ansible.run(
                workdir=artifacts_dir,
                playbook=playbook,
                base_dir=base_dir,
                playbook_dir=validations_dir,
                parallel_run=True,
                inventory=inventory,
                output_callback='validation_json',
                quiet=quiet,
                extra_vars=extra_vars,
                limit_hosts=limit_hosts,
                extra_env_variables=extra_env_vars,
                ansible_cfg=ansible_cfg,
                gathering_policy='explicit',
                ansible_artifact_path=artifacts_dir,
                log_path=log_path,
                run_async=run_async,
                python_interpreter=python_interpreter)
            results.append({'playbook': _playbook,
                            'rc_code': _rc,
                            'status': _status,
                            'validations': _playbook.split('.')[0],
                            'UUID': validation_uuid,
                            })
        if run_async:
            return results
        # Return log results
        uuids = [res['UUID'] for res in results]
        vlog = ValidationLogs()
        return vlog.get_results(uuids)
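
    # Usage sketch (illustrative) for run_validations(): run one validation
    # by name and inspect the parsed log results. The inventory path and
    # validation id are placeholders.
    #
    #   results = ValidationActions().run_validations(
    #       validation_name='check-ram',
    #       inventory='/home/stack/inventory.yaml')
    #   for res in results:
    #       print(res)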
    def group_information(self, groups):
        """Get Information about Validation Groups"""
        val_gp = Group(groups)
        group = val_gp.get_formated_group

        group_info = []
        # Get the number of validations for each group
        for gp in group:
            validations = v_utils.parse_all_validations_on_disk(
                self.validation_path, gp[0])
            group_info.append((gp[0], gp[1], len(validations)))
        column_name = ("Groups", "Description", "Number of Validations")
        return (column_name, group_info)
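
    # Usage sketch (illustrative) for group_information(): 'groups' is the
    # path to a groups definition file; the one below is a placeholder.
    #
    #   columns, info = ValidationActions().group_information(
    #       '/usr/share/validations/groups.yaml')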
    def show_validations_parameters(self, validation, group=None,
                                    format='json', download_file=None):
        """Return Validations Parameters"""
        validations = v_utils.get_validations_playbook(
            self.validation_path, validation, group)
        params = v_utils.get_validations_parameters(validations, validation,
                                                    group, format)
        if download_file:
            params_only = {}
            ext = os.path.splitext(download_file)[1][1:]
            with open(download_file, 'w') as f:
                for val_name in params.keys():
                    for k, v in params[val_name].get('parameters').items():
                        params_only[k] = v

                if ext == 'json':
                    f.write(json.dumps(params_only,
                                       indent=4,
                                       sort_keys=True))
                else:
                    f.write(yaml.safe_dump(params_only,
                                           allow_unicode=True,
                                           default_flow_style=False,
                                           indent=2))
        if format == 'json':
            return json.dumps(params,
                              indent=4,
                              sort_keys=True)
        else:
            return yaml.safe_dump(params,
                                  allow_unicode=True,
                                  default_flow_style=False,
                                  indent=2)
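
    # Usage sketch (illustrative) for show_validations_parameters(): dump the
    # parameters of selected validations, optionally writing the flattened
    # key/value pairs to a file. Names and paths are placeholders.
    #
    #   out = ValidationActions().show_validations_parameters(
    #       ['check-ram'], format='yaml', download_file='params.yaml')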
    def show_history(self, validation_id=None, extension='json',
                     log_path=constants.VALIDATIONS_LOG_BASEDIR):
        """Return validations history"""
        vlogs = ValidationLogs(log_path)
        logs = (vlogs.get_logfile_by_validation(validation_id)
                if validation_id else vlogs.get_all_logfiles(extension))

        values = []
        column_name = ('UUID', 'Validations',
                       'Status', 'Execution at',
                       'Duration')
        for log in logs:
            vlog = ValidationLog(logfile=log)
            if vlog.is_valid_format():
                for play in vlog.get_plays:
                    values.append((play['id'], play['validation_id'],
                                   vlog.get_status,
                                   play['duration'].get('start'),
                                   play['duration'].get('time_elapsed')))
        return (column_name, values)
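
    # Usage sketch (illustrative) for show_history(): with no validation_id
    # every JSON log under log_path is tabulated.
    #
    #   columns, history = ValidationActions().show_history()
    #   columns, one = ValidationActions().show_history('check-ram')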
    def get_status(self, validation_id=None, uuid=None, status='FAILED',
                   log_path=constants.VALIDATIONS_LOG_BASEDIR):
        """Return validations execution details by status"""
        vlogs = ValidationLogs(log_path)
        if validation_id:
            logs = vlogs.get_logfile_by_validation(validation_id)
        elif uuid:
            logs = vlogs.get_logfile_by_uuid(uuid)
        else:
            raise RuntimeError("You need to provide a validation_id or a uuid")

        values = []
        column_name = ['name', 'host', 'status', 'task_data']
        for log in logs:
            vlog = ValidationLog(logfile=log)
            if vlog.is_valid_format():
                for task in vlog.get_tasks_data:
                    if task['status'] == status:
                        for host in task['hosts']:
                            values.append((task['name'], host, task['status'],
                                           task['hosts'][host]))
        return (column_name, values)
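
# Usage sketch (illustrative) for get_status(): list the FAILED tasks of a
# run selected by UUID; the UUID below is a placeholder.
#
#   columns, failed = ValidationActions().get_status(
#       uuid='123e4567-e89b-12d3-a456-426614174000')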