Move validation data as object
Change-Id: I1b82f3ef24b80d55f96bd76c17278b988e4b611d
changes/30/713730/8
parent b454a7edc2
commit 95edef7a8c
@@ -27,8 +27,7 @@ class Show(object):
    def show_validations(self, validation):
        """Display detailed information about a Validation"""
        # Get validation data:
        data = v_utils.get_validations_data(
            v_utils.get_validations_details(validation))
        data = v_utils.get_validations_data(validation)
        format = v_utils.get_validations_stats(
            v_utils.parse_all_validations_logs_on_disk())
        data.update(format)
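
For orientation, a minimal sketch of the data flow show_validations() implements after this change, assuming validations_libs.utils is imported as v_utils as in the hunk above; the wrapper function name and its argument are illustrative only, not part of the change.

# Illustrative sketch only -- not the library's CLI entry point.
from validations_libs import utils as v_utils

def build_show_output(validation):
    # get_validations_data() now takes the validation directly instead of
    # the result of get_validations_details().
    data = v_utils.get_validations_data(validation)
    # Execution statistics are still merged in from the parsed logs.
    stats = v_utils.get_validations_stats(
        v_utils.parse_all_validations_logs_on_disk())
    data.update(stats)
    return data
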
@@ -146,6 +146,13 @@ VALIDATIONS_DATA = {'Description': 'My Validation One Description',
VALIDATIONS_STATS = {'Last execution date': '2019-11-25 13:40:14',
                     'Number of execution': 'Total: 1, Passed: 1, Failed: 0'}

FAKE_PLAYBOOK = {'hosts': 'undercloud',
                 'roles': ['advanced_format_512e_support'],
                 'vars': {'metadata': {'description': 'foo',
                                       'groups': ['prep', 'pre-deployment'],
                                       'name':
                                       'Advanced Format 512e Support'}}}


def fake_ansible_runner_run_return(status='successful', rc=0):
    return status, rc
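
FAKE_PLAYBOOK above mirrors what Validation._get_content() returns for a playbook on disk: the first play of the YAML document. A rough, self-contained sketch of that correspondence; the inline YAML is illustrative and not part of the change.

import yaml

PLAYBOOK_YAML = """
- hosts: undercloud
  roles:
  - advanced_format_512e_support
  vars:
    metadata:
      name: Advanced Format 512e Support
      description: foo
      groups:
      - prep
      - pre-deployment
"""

# _get_content() does yaml.safe_load(...)[0], i.e. it keeps only the first play.
first_play = yaml.safe_load(PLAYBOOK_YAML)[0]
assert first_play == {'hosts': 'undercloud',
                      'roles': ['advanced_format_512e_support'],
                      'vars': {'metadata': {'description': 'foo',
                                            'groups': ['prep', 'pre-deployment'],
                                            'name': 'Advanced Format 512e Support'}}}
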
@@ -25,9 +25,15 @@ class TestUtils(TestCase):
    def setUp(self):
        super(TestUtils, self).setUp()

    def test_get_validations_data(self):
        res = utils.get_validations_data(fakes.VALIDATIONS_LIST[0])
        self.assertEqual(res, fakes.VALIDATIONS_DATA)
    @mock.patch('validations_libs.validation.Validation._get_content',
                return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('six.moves.builtins.open')
    def test_get_validations_data(self, mock_open, mock_data):
        output = {'Name': 'Advanced Format 512e Support',
                  'Description': 'foo', 'Groups': ['prep', 'pre-deployment'],
                  'ID': '512e'}
        res = utils.get_validations_data('/foo/512e.yaml')
        self.assertEqual(res, output)

    def test_get_validations_stats(self):
        res = utils.get_validations_stats(fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
@@ -27,18 +27,18 @@ class TestValidatorShow(TestCase):

    @mock.patch('validations_libs.utils.parse_all_validations_on_disk',
                return_value=fakes.VALIDATIONS_LIST)
    @mock.patch('validations_libs.utils.get_validations_details',
                return_value=fakes.VALIDATIONS_DATA)
    @mock.patch('validations_libs.validation.Validation._get_content',
                return_value=fakes.FAKE_PLAYBOOK)
    @mock.patch('validations_libs.utils.parse_all_validations_logs_on_disk',
                return_value=fakes.VALIDATIONS_LOGS_CONTENTS_LIST)
    def test_validation_show(self, mock_parse_validation, mock_data, mock_log):
        data = {'Description': 'My Validation One Description',
                'Groups': ['prep', 'pre-deployment'],
                'ID': 'my_val1',
                'Name': 'My Validation One Name',
                'parameters': {}}
    @mock.patch('six.moves.builtins.open')
    def test_validation_show(self, mock_open, mock_parse_validation, mock_data,
                             mock_log):
        data = {'Name': 'Advanced Format 512e Support',
                'Description': 'foo', 'Groups': ['prep', 'pre-deployment'],
                'ID': '512e'}
        data.update({'Last execution date': '2019-11-25 13:40:14',
                     'Number of execution': 'Total: 1, Passed: 1, Failed: 0'})
        validations_show = Show()
        out = validations_show.show_validations('foo')
        out = validations_show.show_validations('512e')
        self.assertEqual(out, data)
@@ -22,16 +22,19 @@ import time
import yaml

from validations_libs import constants
from validations_libs.validation import Validation
from uuid import uuid4

LOG = logging.getLogger(__name__ + ".utils")


def current_time():
    """Return current time"""
    return '%sZ' % datetime.datetime.utcnow().isoformat()


def create_artifacts_dir(dir_path=None, prefix=None):
    """Create Ansible artifacts directory"""
    dir_path = (dir_path if dir_path else
                constants.VALIDATION_ANSIBLE_ARTIFACT_PATH)
    validation_uuid = str(uuid4())
@@ -46,30 +49,21 @@ def create_artifacts_dir(dir_path=None, prefix=None):


def parse_all_validations_on_disk(path, groups=None):
    """
    Return a list of validations metadata
    Can be sorted by Groups
    """
    results = []
    validations_abspath = glob.glob("{path}/*.yaml".format(path=path))

    if isinstance(groups, six.string_types):
        group_list = []
        group_list.append(groups)
        groups = group_list

    for pl in validations_abspath:
        validation_id, _ext = os.path.splitext(os.path.basename(pl))

        with open(pl, 'r') as val_playbook:
            contents = yaml.safe_load(val_playbook)

        validation_groups = get_validation_metadata(contents, 'groups') or []
        if not groups or set.intersection(set(groups), set(validation_groups)):
            results.append({
                'id': validation_id,
                'name': get_validation_metadata(contents, 'name'),
                'groups': get_validation_metadata(contents, 'groups'),
                'description': get_validation_metadata(contents,
                                                       'description'),
                'parameters': get_validation_parameters(contents)
            })
        val = Validation(pl)
        if not groups or set(groups).intersection(val.groups):
            results.append(val.get_metadata)
    return results

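
With the loop body above reduced to the Validation object, a hedged usage sketch of the reworked parser; the playbook directory path below is illustrative.

from validations_libs import utils as v_utils

# Each result is now Validation.get_metadata: the 'id' derived from the file
# name plus the play's vars['metadata'] mapping (name, description, groups).
results = v_utils.parse_all_validations_on_disk(
    path='/usr/share/validation-playbooks',  # illustrative path
    groups='pre-deployment')                 # a bare string is wrapped into a list
for metadata in results:
    print(metadata['id'], metadata['groups'])
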
@@ -91,34 +85,9 @@ def parse_all_validation_groups_on_disk(groups_file_path=None):
    return results


def get_validation_metadata(validation, key):
    default_metadata = {
        'name': 'Unnamed',
        'description': 'No description',
        'stage': 'No stage',
        'groups': [],
    }

    try:
        return validation[0]['vars']['metadata'].get(key,
                                                     default_metadata[key])
    except KeyError:
        LOG.exception("Key '{key}' not even found in "
                      "default metadata").format(key=key)
    except TypeError:
        LOG.exception("Failed to get validation metadata.")


def get_validation_parameters(validation):
    try:
        return {
            k: v
            for k, v in validation[0]['vars'].items()
            if k != 'metadata'
        }
    except KeyError:
        LOG.debug("No parameters found for this validation")
        return dict()
    """Return dictionary of parameters"""
    return Validation(validation).get_vars


def read_validation_groups_file(groups_file_path=None):
@@ -193,19 +162,7 @@ def get_validations_data(validation):
    Return validations data with format:
    ID, Name, Description, Groups, Other param
    """
    data = {}
    col_keys = ['ID', 'Name', 'Description', 'Groups']
    if isinstance(validation, dict):
        for key in validation.keys():
            if key in map(str.lower, col_keys):
                for k in col_keys:
                    if key == k.lower():
                        output_key = k
                data[output_key] = validation.get(key)
            else:
                # Get all other values:
                data[key] = validation.get(key)
    return data
    return Validation(validation).get_formated_data


def get_validations_stats(log):
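
Both helpers above are now thin wrappers over the Validation object introduced in the new file below. A self-contained sketch of the end-to-end behaviour, writing a throwaway playbook first so the example can actually run; the file name and content are illustrative, modelled on the my_val1 fake.

import os
import tempfile

from validations_libs import utils

PLAYBOOK = """
- hosts: undercloud
  vars:
    metadata:
      name: My Validation One Name
      description: My Validation One Description
      groups: [prep, pre-deployment]
"""

tmp_dir = tempfile.mkdtemp()
path = os.path.join(tmp_dir, 'my_val1.yaml')
with open(path, 'w') as playbook:
    playbook.write(PLAYBOOK)

# get_validation_parameters() -> Validation(path).get_vars
print(utils.get_validation_parameters(path))   # {} -- no vars besides metadata
# get_validations_data() -> Validation(path).get_formated_data
print(utils.get_validations_data(path))
# {'ID': 'my_val1', 'Name': 'My Validation One Name',
#  'Description': 'My Validation One Description',
#  'Groups': ['prep', 'pre-deployment']}
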
@@ -0,0 +1,79 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

import logging
import os
import yaml
from collections import OrderedDict

LOG = logging.getLogger(__name__ + ".validation")


class Validation(object):

    _col_keys = ['ID', 'Name', 'Description', 'Groups']

    def __init__(self, validation_path):
        self.dict = self._get_content(validation_path)
        self.id = os.path.splitext(os.path.basename(validation_path))[0]

    def _get_content(self, val_path):
        with open(val_path, 'r') as val_playbook:
            return yaml.safe_load(val_playbook)[0]

    @property
    def get_metadata(self):
        self.metadata = {'id': self.id}
        self.metadata.update(self.dict['vars']['metadata'])
        return self.metadata

    @property
    def get_vars(self):
        vars = self.dict['vars'].copy()
        if vars.get('metadata'):
            vars.pop('metadata')
        return vars

    @property
    def get_data(self):
        return self.dict

    @property
    def groups(self):
        return self.dict['vars']['metadata']['groups']

    @property
    def get_id(self):
        return self.id

    @property
    def get_ordered_dict(self):
        data = OrderedDict()
        data.update(self.dict)
        return data

    @property
    def get_formated_data(self):
        data = {}
        for key in self.get_metadata.keys():
            if key in map(str.lower, self._col_keys):
                for k in self._col_keys:
                    if key == k.lower():
                        output_key = k
                data[output_key] = self.get_metadata.get(key)
            else:
                # Get all other values:
                data[key] = self.get_metadata.get(key)
        return data
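
A short, hedged sketch of the new object's surface; the path is illustrative and must point at an existing validation playbook for the snippet to run.

from validations_libs.validation import Validation

val = Validation('/usr/share/validation-playbooks/512e.yaml')  # illustrative path
val.get_id             # '512e' -- playbook file name without extension
val.groups             # e.g. ['prep', 'pre-deployment']
val.get_metadata       # {'id': '512e', 'name': ..., 'description': ..., 'groups': [...]}
val.get_vars           # the play's vars with the 'metadata' key stripped
val.get_formated_data  # {'ID': ..., 'Name': ..., 'Description': ..., 'Groups': [...]}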