# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

import datetime
import glob
import json
import logging
import os
import six
import time

from validations_libs import constants
from validations_libs.group import Group
from validations_libs.validation import Validation
from uuid import uuid4


LOG = logging.getLogger(__name__ + ".utils")


def current_time():
    """Return the current time as an ISO 8601 UTC timestamp."""
    return '%sZ' % datetime.datetime.utcnow().isoformat()
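
# current_time() yields e.g. '2020-03-11T16:07:56.123456Z' (illustrative
# value).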


def create_artifacts_dir(dir_path=None, prefix=None):
    """Create an Ansible artifacts directory.

    Returns a (validation_uuid, log_dir) tuple on success.
    """
    dir_path = (dir_path if dir_path else
                constants.VALIDATION_ANSIBLE_ARTIFACT_PATH)
    validation_uuid = str(uuid4())
    log_dir = "{}/{}_{}_{}".format(dir_path, validation_uuid,
                                   (prefix if prefix else ''), current_time())
    try:
        os.makedirs(log_dir)
        return validation_uuid, log_dir
    except OSError:
        LOG.exception("Error while creating Ansible artifacts log file. "
                      "Please check the access rights for {}".format(log_dir))


def parse_all_validations_on_disk(path, groups=None):
    """Return a list of validations metadata.

    The list can be filtered by a single group name or by a list of
    group names.
    """
    results = []
    validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
    # Accept a bare string as a convenience and wrap it into a list.
    if isinstance(groups, six.string_types):
        groups = [groups]

    for pl in validations_abspath:
        val = Validation(pl)
        if not groups or set(groups).intersection(val.groups):
            results.append(val.get_metadata)
    return results
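
# Illustrative only; the constant is real, the group name is an
# assumption:
#
#     parse_all_validations_on_disk(constants.ANSIBLE_VALIDATION_DIR,
#                                   groups='prep')
#     # -> metadata dicts for every validation tagged 'prep'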


def get_validation_parameters(validation):
    """Return a dictionary of the validation's parameters."""
    return Validation(validation).get_vars


def read_validation_groups_file(groups_path=None):
    """Load the groups.yaml file and return its contents as a dictionary."""
    gp = Group((groups_path if groups_path else
                constants.VALIDATION_GROUPS_INFO))
    return gp.get_data


def get_validation_group_name_list(groups_path=None):
    """Return the list of validation group names."""
    gp = Group((groups_path if groups_path else
                constants.VALIDATION_GROUPS_INFO))
    return gp.get_groups_keys_list
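
# With a hypothetical groups.yaml defining two groups, the helpers above
# would return:
#
#     read_validation_groups_file()
#     # -> {'prep': {...}, 'pre-deployment': {...}}
#     get_validation_group_name_list()
#     # -> ['prep', 'pre-deployment']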


def get_new_validations_logs_on_disk(validations_logs_dir):
    """Return a list of new validation log filenames.

    A log is considered new when it is a '.json' file whose name does
    not start with 'processed'.
    """
    files = []

    for root, dirs, filenames in os.walk(validations_logs_dir):
        # Aggregate matches from every walked directory instead of
        # overwriting them on each iteration.
        files.extend(
            f for f in filenames if not f.startswith('processed')
            and os.path.splitext(f)[1] == '.json'
        )
    return files
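
# Example with assumed filenames: given 'foo.json', 'processed_foo.json'
# and 'foo.log' under the logs directory, only 'foo.json' is returned.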


def parse_all_validations_logs_on_disk(uuid_run=None, validation_id=None):
    """Return the parsed contents of the validation logs on disk.

    The log files can be filtered by run UUID and/or validation id;
    when both are given, the run UUID filter takes precedence.
    """
    results = []
    path = constants.VALIDATIONS_LOG_BASEDIR
    logfile = "{}/*.json".format(path)

    if validation_id:
        logfile = "{}/*_{}_*.json".format(path, validation_id)
    if uuid_run:
        logfile = "{}/*_{}_*.json".format(path, uuid_run)

    logfiles_path = glob.glob(logfile)

    for logfile_path in logfiles_path:
        with open(logfile_path, 'r') as log:
            contents = json.load(log)
            results.append(contents)
    return results
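
# The globs above assume the '<uuid>_<validation>_<timestamp>.json' log
# naming; illustrative call:
#
#     parse_all_validations_logs_on_disk(validation_id='check-ram')
#     # -> parsed JSON contents of every 'check-ram' run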


def get_validations_details(validation):
    """Return the metadata of the given validation.

    An empty dictionary is returned when the validation cannot be found
    on disk.
    """
    results = parse_all_validations_on_disk(constants.ANSIBLE_VALIDATION_DIR)
    for r in results:
        if r['id'] == validation:
            return r
    return {}
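
# For example (the validation id is an assumption):
#
#     get_validations_details('check-cpu')
#     # -> that validation's metadata dict, or {} if none matches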


def get_validations_data(validation):
    """Return validation data in the following format:

    ID, Name, Description, Groups, other parameters
    """
    return Validation(validation).get_formated_data


def get_validations_stats(log):
    """Return run statistics from parsed validation logs.

    log is expected to be a list of parsed log contents, as returned by
    parse_all_validations_logs_on_disk().
    """
    total_number = len(log)
    failed_number = 0
    passed_number = 0
    last_execution = None
    dates = []

    for run in log:
        # A run that produced 'validation_output' entries is counted as
        # failed.
        if run.get('validation_output'):
            failed_number += 1
        else:
            passed_number += 1

        date_time = \
            run['plays'][0]['play']['duration'].get('start').split('T')
        date_start = date_time[0]
        time_start = date_time[1].split('Z')[0]
        newdate = \
            time.strptime(date_start + time_start, '%Y-%m-%d%H:%M:%S.%f')
        dates.append(newdate)

    if dates:
        last_execution = time.strftime('%Y-%m-%d %H:%M:%S', max(dates))

    return {"Last execution date": last_execution,
            "Number of execution": "Total: {}, Passed: {}, "
                                   "Failed: {}".format(total_number,
                                                       passed_number,
                                                       failed_number)}
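
# Illustrative input/output; the dictionary keys are the real ones
# returned above, the values are made up:
#
#     stats = get_validations_stats(parse_all_validations_logs_on_disk())
#     # -> {'Last execution date': '2020-03-18 21:03:25',
#     #     'Number of execution': 'Total: 2, Passed: 1, Failed: 1'}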