DNM First steps towards new validations

Signed-off-by: Jiri Podivin <jpodivin@redhat.com>
Change-Id: Id7eaebc7bd9a4a80d09f9c98ca6fb58173488003
Jiri Podivin 2023-01-27 16:47:17 +01:00
parent 430530fad5
commit 60d808402c
19 changed files with 501 additions and 697 deletions


@ -320,7 +320,7 @@ class Ansible:
def run(self, playbook, inventory, workdir, playbook_dir=None,
connection='smart', output_callback=None,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
base_dir=constants.DEFAULT_ANSIBLE_BASEDIR,
ssh_user=None, key=None, module_path=None,
limit_hosts=None, tags=None, skip_tags=None,
verbosity=0, quiet=False, extra_vars=None,


@ -56,7 +56,7 @@ class CommunityValidationInit(BaseCommand):
"is located."))
parser.add_argument('--ansible-base-dir', dest='ansible_base_dir',
default=constants.DEFAULT_VALIDATIONS_BASEDIR,
default=constants.DEFAULT_ANSIBLE_BASEDIR,
help=("Path where the ansible roles, library "
"and plugins are located."))
return parser
@ -71,7 +71,7 @@ class CommunityValidationInit(BaseCommand):
validation_dir=parsed_args.validation_dir,
ansible_base_dir=parsed_args.ansible_base_dir)
if co_validation.is_community_validations_enabled(self.base.config):
if utils.community_validations_on(self.base.config):
LOG.debug(
(
"Checking the presence of the community validations "
@ -96,7 +96,7 @@ class CommunityValidationInit(BaseCommand):
)
)
if co_validation.is_playbook_exists():
if co_validation.validation_exists(self.base.config):
raise RuntimeError(
(
"An Ansible playbook called {} "


@ -58,7 +58,7 @@ class ValidationList(BaseLister):
validation_dir = parsed_args.validation_dir
group = parsed_args.group
v_actions = ValidationActions(validation_path=validation_dir)
v_actions = ValidationActions(base_validation_path=validation_dir)
return (v_actions.list_validations(groups=group,
categories=category,
products=product,


@ -52,7 +52,7 @@ class Run(BaseCommand):
help=cli_constants.PLAY_PATH_DESC)
parser.add_argument('--ansible-base-dir', dest='ansible_base_dir',
default=constants.DEFAULT_VALIDATIONS_BASEDIR,
default=constants.DEFAULT_ANSIBLE_BASEDIR,
help=("Path where the ansible roles, library "
"and plugins are located.\n"))


@ -44,8 +44,8 @@ class Show(BaseShow):
validation_dir = parsed_args.validation_dir
validation_name = parsed_args.validation_name
v_actions = ValidationActions(validation_path=validation_dir)
data = v_actions.show_validations(
v_actions = ValidationActions(base_validation_path=validation_dir)
data = v_actions.show_validation(
validation_name, validation_config=self.base.config)
if data:
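
Taken together, the lister and show hunks above switch callers to the renamed keyword argument and method. A minimal usage sketch of the renamed API, assuming only the names visible in this diff; the playbook directory, the validation id and the empty config dict are illustrative:

from validations_libs.validation_actions import ValidationActions

# 'base_validation_path' and 'show_validation' are the names introduced by this change.
v_actions = ValidationActions(base_validation_path='/usr/share/ansible/validation-playbooks')
data = v_actions.show_validation('check-cpu', validation_config={})
if data:
    print(data)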


@ -15,16 +15,12 @@
# under the License.
#
from validations_libs.logger import getLogger
import re
import os
# @matbu backward compatibility for stable/train
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
import re
from pathlib import Path
from validations_libs import constants, utils
from validations_libs.logger import getLogger
LOG = getLogger(__name__)
@ -40,7 +36,7 @@ class CommunityValidation:
self,
validation_name,
validation_dir=constants.ANSIBLE_VALIDATION_DIR,
ansible_base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR):
ansible_base_dir=constants.DEFAULT_ANSIBLE_BASEDIR):
"""Construct Role and Playbook."""
self._validation_name = validation_name
@ -126,42 +122,32 @@ class CommunityValidation:
return Path(self.role_dir_path).exists() or \
self.role_name in non_community_roles
def is_playbook_exists(self):
"""New playbook existence check
def validation_exists(self, validation_config=None):
"""New validation existence check
This class method checks if the new playbook file is already existing
in the official validations catalog and in the current community
validations directory.
This class method checks if the requested playbook file already exists
in any of the validation playbook paths.
First, it gets the list of the playbooks yaml file available in
``constants.ANSIBLE_VALIDATIONS_DIR``. If there is a match in at least
one of the directories, it returns ``True``, otherwise ``False``.
Method first checks if the validation is in the default path,
afterwards it retrieves information about validation playbooks
in locations defined by ``constants.VALIDATION_PLAYBOOK_DIRS``,
and compares their ids with that of the new validation.
If there is any match it returns ``True``, otherwise ``False``.
:param validation_config: A parsed validation.cfg file
:type validation_config: ``dict``
:rtype: ``Boolean``
"""
non_community_playbooks = []
if Path(self.validation_dir).exists():
non_community_playbooks = [
Path(x).name
for x in Path(self.validation_dir).iterdir()
if x.is_file()
]
if Path(self.playbook_path).exists():
return True
return Path(self.playbook_path).exists() or \
self.playbook_name in non_community_playbooks
existing_validations = utils.parse_validations(
str(self.playbook_basedir.resolve()),
validation_config=validation_config)
def is_community_validations_enabled(self, base_config):
"""Checks if the community validations are enabled in the config file
:param base_config: Contents of the configuration file
:type base_config: ``Dict``
:rtype: ``Boolean``
"""
config = base_config
default_conf = (config.get('default', {})
if isinstance(config, dict) else {})
return default_conf.get('enable_community_validations', True)
return self._validation_name in [val.id for val in existing_validations]
@property
def role_name(self):
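
A short sketch of how the renamed check might be driven, mirroring the CLI flow shown earlier in this diff; the validation name and the config dict contents are illustrative:

from validations_libs import utils
from validations_libs.community.init_validation import CommunityValidation

config = {'default': {'enable_community_validations': True}}  # illustrative parsed validation.cfg
co_val = CommunityValidation('my_new_val')  # illustrative name

# Mirror the init command: only proceed when community validations are
# enabled and no validation with this id exists yet.
if utils.community_validations_on(config) and not co_val.validation_exists(config):
    print("Safe to scaffold the new community validation")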


@ -22,25 +22,29 @@ or as a fallback, when custom locations fail.
import os
# @matbu backward compatibility for stable/train
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
from pathlib import Path
DEFAULT_VALIDATIONS_BASEDIR = '/usr/share/ansible'
DEFAULT_ANSIBLE_BASEDIR = '/usr/share/ansible'
ANSIBLE_VALIDATION_DIR = os.path.join(
DEFAULT_VALIDATIONS_BASEDIR,
DEFAULT_ANSIBLE_BASEDIR,
'validation-playbooks')
ANSIBLE_ROLES_DIR = Path.joinpath(Path(DEFAULT_VALIDATIONS_BASEDIR),
ANSIBLE_ROLES_DIR = Path.joinpath(Path(DEFAULT_ANSIBLE_BASEDIR),
'roles')
VALIDATION_GROUPS_INFO = os.path.join(
DEFAULT_VALIDATIONS_BASEDIR,
DEFAULT_ANSIBLE_BASEDIR,
'groups.yaml')
COLLECTION_VALIDATIONS_PATH = 'collections/ansible-collections/validations/*/playbooks/'
VALIDATION_PLAYBOOK_DIRS = [
ANSIBLE_VALIDATION_DIR,
os.path.join(os.path.expanduser('~'), COLLECTION_VALIDATIONS_PATH),
os.path.join(DEFAULT_ANSIBLE_BASEDIR, COLLECTION_VALIDATIONS_PATH)
]
# NOTE(fressi) The HOME folder environment variable may be undefined.
VALIDATIONS_LOG_BASEDIR = os.path.expanduser('~/validations')
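
A small sketch of how the new search-path list could be expanded at runtime; it simply mirrors the glob pattern used later in utils, and the printout is illustrative:

import glob

from validations_libs import constants

# Collect every candidate playbook: the default directory plus the
# collection locations added by this change (glob resolves the '*' segment).
candidate_playbooks = []
for playbook_dir in constants.VALIDATION_PLAYBOOK_DIRS:
    candidate_playbooks.extend(glob.glob("{}/*.yaml".format(playbook_dir)))
print(candidate_playbooks)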


@ -39,3 +39,13 @@ class ValidationShowException(Exception):
of the `ValidationsActions` class, cause unacceptable behavior
from which it is impossible to recover.
"""
class ValidationParsingException(Exception):
"""ValidationParsingException is to be raised when playbook
retrieved from storage and parsed in the `__init__` method
of the `Validation` object is malformed, or incompatible with
the requirements of validation runtime.
The exception should be raised as soon as possible after parsing
of the file, in order to ensure the fastest report and recovery.
"""


@ -32,7 +32,7 @@ class TestCommunityValidationInit(BaseCommand):
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.execute')
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.is_playbook_exists',
'validations_libs.community.init_validation.CommunityValidation.validation_exists',
return_value=False)
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.is_role_exists',
@ -50,7 +50,7 @@ class TestCommunityValidationInit(BaseCommand):
self.cmd.take_action(parsed_args)
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.is_community_validations_enabled',
'validations_libs.utils.community_validations_on',
return_value=False)
def test_validation_init_with_com_val_disabled(self, mock_config):
args = self._set_args(['my_new_community_val'])
@ -64,7 +64,7 @@ class TestCommunityValidationInit(BaseCommand):
'validations_libs.community.init_validation.CommunityValidation.is_role_exists',
return_value=True)
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.is_playbook_exists',
'validations_libs.community.init_validation.CommunityValidation.validation_exists',
return_value=False)
@mock.patch('validations_libs.utils.check_community_validations_dir')
def test_validation_init_with_role_existing(self,
@ -82,7 +82,7 @@ class TestCommunityValidationInit(BaseCommand):
'validations_libs.community.init_validation.CommunityValidation.is_role_exists',
return_value=False)
@mock.patch(
'validations_libs.community.init_validation.CommunityValidation.is_playbook_exists',
'validations_libs.community.init_validation.CommunityValidation.validation_exists',
return_value=True)
@mock.patch('validations_libs.utils.check_community_validations_dir')
def test_validation_init_with_playbook_existing(self,


@ -12,13 +12,9 @@
# License for the specific language governing permissions and limitations
# under the License.
#
try:
from unittest import mock
except ImportError:
import mock
from unittest import mock
from validations_libs.cli import lister
from validations_libs.tests import fakes
from validations_libs.tests.cli.fakes import BaseCommand
@ -28,35 +24,19 @@ class TestList(BaseCommand):
super(TestList, self).setUp()
self.cmd = lister.ValidationList(self.app, None)
@mock.patch('validations_libs.validation_actions.ValidationActions.'
'list_validations',
return_value=fakes.VALIDATIONS_LIST,
@mock.patch('validations_libs.cli.lister.ValidationActions',
autospec=True)
def test_list_validations(self, mock_list):
def test_list_validations(self, mock_actions):
arglist = ['--validation-dir', 'foo']
verifylist = [('validation_dir', 'foo')]
val_list = [
{'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment', 'no-op', 'post'],
'categories': ['os', 'system', 'ram'],
'products': ['product1'],
'id': 'my_val1',
'name': 'My Validation One Name',
'parameters': {}
}, {
'description': 'My Validation Two Description',
'groups': ['prep', 'pre-introspection', 'post', 'pre'],
'categories': ['networking'],
'products': ['product1'],
'id': 'my_val2',
'name': 'My Validation Two Name',
'parameters': {'min_value': 8}
}]
list_call = mock.call().list_validations(
groups=[], categories=[], products=[], validation_config={})
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertEqual(result, val_list)
self.cmd.take_action(parsed_args)
mock_actions.assert_called_once_with(arglist[1])
mock_actions.assert_has_calls([list_call])
@mock.patch('validations_libs.validation_actions.ValidationActions.'
'list_validations',
@ -70,44 +50,46 @@ class TestList(BaseCommand):
result = self.cmd.take_action(parsed_args)
self.assertEqual(result, [])
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
return_value=fakes.VALIDATIONS_LIST_GROUP,
@mock.patch('validations_libs.cli.lister.ValidationActions',
autospec=True)
def test_list_validations_group(self, mock_list):
def test_list_validations_group(self, mock_actions):
arglist = ['--validation-dir', 'foo', '--group', 'prep']
verifylist = [('validation_dir', 'foo'),
('group', ['prep'])]
val_list = fakes.VALIDATION_LIST_RESULT
list_call = mock.call().list_validations(
groups=['prep'], categories=[], products=[], validation_config={})
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertEqual(result, val_list)
mock_actions.assert_called_once_with(arglist[1])
mock_actions.assert_has_calls([list_call])
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
return_value=fakes.VALIDATIONS_LIST_GROUP,
@mock.patch('validations_libs.cli.lister.ValidationActions',
autospec=True)
def test_list_validations_by_category(self, mock_list):
def test_list_validations_by_category(self, mock_actions):
arglist = ['--validation-dir', 'foo', '--category', 'networking']
verifylist = [('validation_dir', 'foo'),
('category', ['networking'])]
val_list = fakes.VALIDATION_LIST_RESULT
list_call = mock.call().list_validations(
groups=[], categories=['networking'], products=[], validation_config={})
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertEqual(result, val_list)
self.cmd.take_action(parsed_args)
mock_actions.assert_called_once_with(arglist[1])
mock_actions.assert_has_calls([list_call])
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
return_value=fakes.VALIDATIONS_LIST_GROUP,
@mock.patch('validations_libs.cli.lister.ValidationActions',
autospec=True)
def test_list_validations_by_product(self, mock_list):
def test_list_validations_by_product(self, mock_actions):
arglist = ['--validation-dir', 'foo', '--product', 'product1']
verifylist = [('validation_dir', 'foo'),
('product', ['product1'])]
val_list = fakes.VALIDATION_LIST_RESULT
list_call = mock.call().list_validations(
groups=[], categories=[], products=['product1'], validation_config={})
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
result = self.cmd.take_action(parsed_args)
self.assertEqual(result, val_list)
self.cmd.take_action(parsed_args)
mock_actions.assert_called_once_with(arglist[1])
mock_actions.assert_has_calls([list_call])


@ -29,7 +29,7 @@ class TestShow(BaseCommand):
self.cmd = show.Show(self.app, None)
@mock.patch('validations_libs.validation_actions.ValidationActions.'
'show_validations')
'show_validation')
def test_show_validations(self, mock_show):
arglist = ['foo']
verifylist = [('validation_name', 'foo')]


@ -13,20 +13,8 @@
# under the License.
#
try:
from unittest import mock
except ImportError:
import mock
# @matbu backward compatibility for stable/train
try:
from pathlib import PosixPath
PATHLIB = 'pathlib'
except ImportError:
from pathlib2 import PosixPath
PATHLIB = 'pathlib2'
from unittest import TestCase
from pathlib import PosixPath
from unittest import TestCase, mock
from validations_libs import constants
from validations_libs.community.init_validation import \
@ -90,10 +78,10 @@ class TestCommunityValidation(TestCase):
self.assertEqual(co_val.playbook_basedir,
constants.COMMUNITY_PLAYBOOKS_DIR)
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_ROLES_ITERDIR2)
@mock.patch('{}.Path.is_dir'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[False, True])
@mock.patch('pathlib.Path.is_dir')
@mock.patch('pathlib.Path.exists', side_effect=[False, True])
def test_role_already_exists_in_comval(self,
mock_play_path_exists,
mock_path_is_dir,
@ -102,10 +90,10 @@ class TestCommunityValidation(TestCase):
co_val = cv(validation_name)
self.assertTrue(co_val.is_role_exists())
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_ROLES_ITERDIR1)
@mock.patch('{}.Path.is_dir'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[True, False])
@mock.patch('pathlib.Path.is_dir')
@mock.patch('pathlib.Path.exists', side_effect=[True, False])
def test_role_already_exists_in_non_comval(self,
mock_play_path_exists,
mock_path_is_dir,
@ -114,10 +102,10 @@ class TestCommunityValidation(TestCase):
co_val = cv(validation_name)
self.assertTrue(co_val.is_role_exists())
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_ROLES_ITERDIR2)
@mock.patch('{}.Path.is_dir'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[True, False])
@mock.patch('pathlib.Path.is_dir')
@mock.patch('pathlib.Path.exists', side_effect=[True, False])
def test_role_not_exists(self,
mock_path_exists,
mock_path_is_dir,
@ -126,41 +114,38 @@ class TestCommunityValidation(TestCase):
co_val = cv(validation_name)
self.assertFalse(co_val.is_role_exists())
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_PLAYBOOKS_ITERDIR1)
@mock.patch('{}.Path.is_file'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[True, False])
@mock.patch('pathlib.Path.is_file')
@mock.patch('pathlib.Path.exists', side_effect=[True, False])
def test_playbook_already_exists_in_non_comval(self,
mock_path_exists,
mock_path_is_file,
mock_path_iterdir):
validation_name = "my_val"
co_val = cv(validation_name)
self.assertTrue(co_val.is_playbook_exists())
self.assertTrue(co_val.validation_exists())
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
return_value=fakes.FAKE_PLAYBOOKS_ITERDIR2)
@mock.patch('{}.Path.is_file'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[False, True])
@mock.patch('glob.glob')
@mock.patch('pathlib.Path.is_file')
@mock.patch('pathlib.Path.exists', side_effect=[True])
def test_playbook_already_exists_in_comval(self,
mock_path_exists,
mock_path_is_file,
mock_path_iterdir):
mock_glob):
validation_name = "my_val"
co_val = cv(validation_name)
self.assertTrue(co_val.is_playbook_exists())
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
return_value=fakes.FAKE_PLAYBOOKS_ITERDIR2)
@mock.patch('{}.Path.is_file'.format(PATHLIB))
@mock.patch('{}.Path.exists'.format(PATHLIB), side_effect=[True, False])
co_val = cv(validation_name)
self.assertTrue(co_val.validation_exists())
@mock.patch('pathlib.Path.is_file')
@mock.patch('pathlib.Path.exists', side_effect=[False, False])
def test_playbook_not_exists(self,
mock_path_exists,
mock_path_is_file,
mock_path_iterdir):
mock_path_is_file):
validation_name = "my_val"
co_val = cv(validation_name)
self.assertFalse(co_val.is_playbook_exists())
self.assertFalse(co_val.validation_exists())
def test_execute_with_role_name_not_compliant(self):
validation_name = "3_my-val"


@ -21,7 +21,7 @@ except ImportError:
from validations_libs import constants
VALIDATIONS_LIST = [{
VALIDATIONS_LIST = [{'vars': {'metadata': {
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment', 'no-op', 'post'],
'categories': ['os', 'system', 'ram'],
@ -29,7 +29,7 @@ VALIDATIONS_LIST = [{
'id': 'my_val1',
'name': 'My Validation One Name',
'parameters': {}
}, {
}}}, {'vars': {'metadata': {
'description': 'My Validation Two Description',
'groups': ['prep', 'pre-introspection', 'post', 'pre'],
'categories': ['networking'],
@ -37,7 +37,7 @@ VALIDATIONS_LIST = [{
'id': 'my_val2',
'name': 'My Validation Two Name',
'parameters': {'min_value': 8}
}]
}}}]
VALIDATIONS_LIST_GROUP = [{
'description': 'My Validation Two Description',


@ -16,23 +16,11 @@
import logging
import os
import subprocess
from pathlib import PosixPath
from unittest import TestCase, mock
try:
from unittest import mock
except ImportError:
import mock
# @matbu backward compatibility for stable/train
try:
from pathlib import PosixPath
PATHLIB = 'pathlib'
except ImportError:
from pathlib2 import PosixPath
PATHLIB = 'pathlib2'
from unittest import TestCase
from validations_libs import utils, constants
from validations_libs import constants, utils
from validations_libs.validation import Validation
from validations_libs.tests import fakes
@ -41,6 +29,23 @@ class TestUtils(TestCase):
def setUp(self):
super(TestUtils, self).setUp()
self.logger = mock.patch('validations_libs.logger.getLogger')
# Mocking all glob calls, so that only the first path will
# return any items
globs = [['/foo/playbook/foo.yaml'], []]
globs.extend(len(constants.VALIDATION_PLAYBOOK_DIRS)*[[]])
def _return_empty_list(_=None):
while True:
yield []
def return_on_community(path):
if 'community' in path:
return ['/home/foo/community-validations/playbooks/foo.yaml']
return []
self.globs_first_only = globs
self.return_empty_list = _return_empty_list
self.return_on_community = return_on_community
@mock.patch('validations_libs.validation.Validation._get_content',
return_value=fakes.FAKE_PLAYBOOK[0])
@ -101,200 +106,202 @@ class TestUtils(TestCase):
utils.get_validations_data,
validation)
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk(self, mock_glob, mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
result = utils.parse_all_validations_on_disk('/foo/playbook')
self.assertEqual(result, [fakes.FAKE_METADATA])
mock_load, mock_is_file):
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook')
self.assertTrue(isinstance(result[0], Validation))
self.assertEqual(result[0].get_metadata, fakes.FAKE_METADATA)
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_community_validations_on_disk(
self, mock_glob, mock_open, mock_load):
mock_glob.side_effect = \
([], ['/foo/playbook/foo.yaml'])
result = utils.parse_all_validations_on_disk('/foo/playbook')
self.assertEqual(result, [fakes.FAKE_METADATA])
self, mock_glob, mock_open, mock_load, mock_is_file):
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook')
self.assertTrue(isinstance(result[0], Validation))
self.assertEqual(result[0].get_metadata, fakes.FAKE_METADATA)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_community_disabled_validations_on_disk(
self, mock_glob, mock_open, mock_load):
mock_glob.side_effect = \
([], ['/foo/playbook/foo.yaml'])
result = utils.parse_all_validations_on_disk(
mock_glob.side_effect = self.return_empty_list()
result = utils.parse_validations(
'/foo/playbook',
validation_config={'default': {"enable_community_validations": False}})
self.assertEqual(result, [])
def test_parse_all_validations_on_disk_wrong_path_type(self):
self.assertRaises(TypeError,
utils.parse_all_validations_on_disk,
utils.parse_validations,
path=['/foo/playbook'])
def test_parse_all_validations_on_disk_wrong_groups_type(self):
self.assertRaises(TypeError,
utils.parse_all_validations_on_disk,
utils.parse_validations,
path='/foo/playbook',
groups='foo1,foo2')
def test_parse_all_validations_on_disk_wrong_categories_type(self):
self.assertRaises(TypeError,
utils.parse_all_validations_on_disk,
utils.parse_validations,
path='/foo/playbook',
categories='foo1,foo2')
def test_parse_all_validations_on_disk_wrong_products_type(self):
self.assertRaises(TypeError,
utils.parse_all_validations_on_disk,
utils.parse_validations,
path='/foo/playbook',
products='foo1,foo2')
def test_get_validations_playbook_wrong_validation_id_type(self):
self.assertRaises(TypeError,
utils.get_validations_playbook,
utils.get_validations_playbook_paths,
path='/foo/playbook',
validation_id='foo1,foo2')
def test_get_validations_playbook_wrong_groups_type(self):
self.assertRaises(TypeError,
utils.get_validations_playbook,
utils.get_validations_playbook_paths,
path='/foo/playbook',
groups='foo1,foo2')
def test_get_validations_playbook_wrong_categories_type(self):
self.assertRaises(TypeError,
utils.get_validations_playbook,
utils.get_validations_playbook_paths,
path='/foo/playbook',
categories='foo1,foo2')
def test_get_validations_playbook_wrong_products_type(self):
self.assertRaises(TypeError,
utils.get_validations_playbook,
utils.get_validations_playbook_paths,
path='/foo/playbook',
products='foo1,foo2')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk_by_group(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
result = utils.parse_all_validations_on_disk('/foo/playbook',
['prep'])
self.assertEqual(result, [fakes.FAKE_METADATA])
mock_load,
mock_isfile):
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook', groups=['prep'])
self.assertEqual(result[0].get_metadata, fakes.FAKE_METADATA)
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk_by_category(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
result = utils.parse_all_validations_on_disk('/foo/playbook',
categories=['os'])
self.assertEqual(result, [fakes.FAKE_METADATA])
mock_load,
mock_isfile):
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook',
categories=['os'])
self.assertEqual(result[0].get_metadata, fakes.FAKE_METADATA)
def test_get_validations_playbook_wrong_path_type(self):
self.assertRaises(TypeError,
utils.get_validations_playbook,
utils.get_validations_playbook_paths,
path=['/foo/playbook'])
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk_by_product(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
result = utils.parse_all_validations_on_disk('/foo/playbook',
products=['product1'])
self.assertEqual(result, [fakes.FAKE_METADATA])
mock_load,
mock_isfile):
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook',
products=['product1'])
self.assertEqual(result[0].get_metadata, fakes.FAKE_METADATA)
@mock.patch('os.path.isfile')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk_parsing_error(self, mock_glob,
mock_open, mock_load):
globs_t = self.globs_first_only
globs_t[-1] = ['/usr/ansible/playbooks/nonsense.yaml']
mock_glob.side_effect = globs_t
result = utils.parse_validations('/foo/playbook')
self.assertEqual(result, [])
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_validations_playbook_by_id(self, mock_open, mock_load,
mock_glob, mock_isfile):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
mock_isfile.return_value = True
result = utils.get_validations_playbook('/foo/playbook',
validation_id=['foo'])
self.assertEqual(result, ['/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook', validation_ids=['foo'])
self.assertEqual(result[0].path, '/foo/playbook/foo.yaml')
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_community_playbook_by_id(self, mock_open, mock_load,
mock_glob, mock_isfile):
mock_glob.side_effect = (
[],
['/home/foo/community-validations/playbooks/foo.yaml'])
mock_isfile.return_value = True
mock_glob.side_effect = self.return_on_community
# AP this needs a bit of an explanation. We look explicitly at
# the /foo/playbook directory but the community validation path is
# implicit and we find there the id that we are looking for.
result = utils.get_validations_playbook('/foo/playbook',
validation_id=['foo'])
self.assertEqual(result, ['/home/foo/community-validations/playbooks/foo.yaml'])
result = utils.parse_validations('/foo/playbook')
self.assertEqual(result[0].path, '/home/foo/community-validations/playbooks/foo.yaml')
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_community_disabled_playbook_by_id(
self, mock_open, mock_load, mock_glob, mock_isfile):
mock_glob.side_effect = (
[],
['/home/foo/community-validations/playbooks/foo.yaml'])
mock_isfile.return_value = True
mock_glob.side_effect = self.return_on_community
# The validations_commons validation is not found and community_vals is disabled
# So no validation should be found.
result = utils.get_validations_playbook(
result = utils.parse_validations(
'/foo/playbook',
validation_id=['foo'],
validation_ids=['foo'],
validation_config={'default': {"enable_community_validations": False}})
self.assertEqual(result, [])
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=False)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_community_playbook_by_id_not_found(
self, mock_open, mock_load, mock_glob, mock_isfile):
mock_glob.side_effect = (
[],
['/home/foo/community-validations/playbooks/foo.yaml/'])
# the is file fails
mock_isfile.return_value = False
result = utils.get_validations_playbook('/foo/playbook',
validation_id=['foo'])
mock_glob.side_effect = self.return_on_community
# the is_file fails
result = utils.parse_validations('/foo/playbook',
validation_ids=['foo'])
self.assertEqual(result, [])
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_validations_playbook_by_id_group(self, mock_open, mock_load,
mock_glob, mock_isfile):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
mock_isfile.return_value = True
result = utils.get_validations_playbook('/foo/playbook', ['foo'], ['prep'])
self.assertEqual(result, ['/foo/playbook/foo.yaml',
'/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook', ['foo'], ['prep'])
self.assertEqual(result[0].path, '/foo/playbook/foo.yaml')
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('os.listdir')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@ -303,34 +310,31 @@ class TestUtils(TestCase):
mock_listdir,
mock_isfile):
mock_listdir.return_value = ['foo.yaml']
mock_isfile.return_value = True
result = utils.get_validations_playbook('/foo/playbook',
groups=['no_group'])
result = utils.parse_validations('/foo/playbook',
groups=['no_group'])
self.assertEqual(result, [])
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_validations_playbook_by_category(self, mock_open, mock_load,
mock_glob, mock_isfile):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
mock_isfile.return_value = True
result = utils.get_validations_playbook('/foo/playbook',
categories=['os', 'storage'])
self.assertEqual(result, ['/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook',
categories=['os', 'storage'])
self.assertEqual(result[0].path, '/foo/playbook/foo.yaml')
@mock.patch('os.path.isfile')
@mock.patch('os.path.isfile', return_value=True)
@mock.patch('glob.glob')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_validations_playbook_by_product(self, mock_open, mock_load,
mock_glob, mock_isfile):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
mock_isfile.return_value = True
result = utils.get_validations_playbook('/foo/playbook',
products=['product1'])
self.assertEqual(result, ['/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_validations('/foo/playbook',
products=['product1'])
self.assertEqual(result[0].path, '/foo/playbook/foo.yaml')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@ -376,30 +380,12 @@ class TestUtils(TestCase):
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
@mock.patch('builtins.open')
def test_get_validations_parameters_no_group(self, mock_open, mock_load):
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
['foo'])
def test_get_validations_parameters(self, mock_open, mock_load):
test_validation = Validation('/foo/playbook/foo.yaml')
result = utils.get_validations_parameters([test_validation])
output = {'foo': {'parameters': {'foo': 'bar'}}}
self.assertEqual(result, output)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
@mock.patch('builtins.open')
def test_get_validations_parameters_no_val(self, mock_open, mock_load):
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
[], ['prep'])
output = {'foo': {'parameters': {'foo': 'bar'}}}
self.assertEqual(result, output)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_validations_parameters_nothing(self, mock_open, mock_load):
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
[], [])
self.assertEqual(result, {})
@mock.patch('validations_libs.utils.os.makedirs')
@mock.patch(
'validations_libs.utils.os.access',
@ -521,13 +507,13 @@ class TestUtils(TestCase):
results['ansible_environment']['ANSIBLE_STDOUT_CALLBACK'],
fakes.ANSIBLE_ENVIRONNMENT_CONFIG['ANSIBLE_STDOUT_CALLBACK'])
@mock.patch('{}.Path.exists'.format(PATHLIB),
@mock.patch('pathlib.Path.exists',
return_value=False)
@mock.patch('{}.Path.is_dir'.format(PATHLIB),
@mock.patch('pathlib.Path.is_dir',
return_value=False)
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=iter([]))
@mock.patch('{}.Path.mkdir'.format(PATHLIB))
@mock.patch('pathlib.Path.mkdir')
def test_check_creation_community_validations_dir(self, mock_mkdir,
mock_iterdir,
mock_isdir,
@ -543,11 +529,11 @@ class TestUtils(TestCase):
PosixPath("/foo/bar/community-validations/lookup_plugins")]
)
@mock.patch('{}.Path.is_dir'.format(PATHLIB), return_value=True)
@mock.patch('{}.Path.exists'.format(PATHLIB), return_value=True)
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.is_dir', return_value=True)
@mock.patch('pathlib.Path.exists', return_value=True)
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_COVAL_MISSING_SUBDIR_ITERDIR1)
@mock.patch('{}.Path.mkdir'.format(PATHLIB))
@mock.patch('pathlib.Path.mkdir')
def test_check_community_validations_dir_with_missing_subdir(self,
mock_mkdir,
mock_iterdir,


@ -21,6 +21,7 @@ from unittest import TestCase
from validations_libs.validation import Validation
from validations_libs.tests import fakes
from validations_libs import exceptions
class TestValidation(TestCase):
@ -44,10 +45,10 @@ class TestValidation(TestCase):
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_metadata_wrong_playbook(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_metadata
self.assertEqual('No metadata found in validation foo',
def test_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(exceptions.ValidationParsingException) as exc_mgr:
Validation('/tmp/foo')
self.assertEqual('No metadata found in validation /tmp/foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
@ -64,14 +65,6 @@ class TestValidation(TestCase):
data = val.get_vars
self.assertEqual(data, {})
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_vars_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_vars
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_id(self, mock_open, mock_yaml):
@ -88,14 +81,6 @@ class TestValidation(TestCase):
groups = val.groups
self.assertEqual(groups, ['prep', 'pre-deployment'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_groups_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').groups
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_groups_with_no_existing_groups(self, mock_open, mock_yaml):
@ -110,14 +95,6 @@ class TestValidation(TestCase):
categories = val.categories
self.assertEqual(categories, ['os', 'storage'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_categories_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').categories
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_categories_with_no_existing_categories(self, mock_open, mock_yaml):
@ -132,14 +109,6 @@ class TestValidation(TestCase):
products = val.products
self.assertEqual(products, ['product1'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_products_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').products
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_products_with_no_existing_products(self, mock_open, mock_yaml):
@ -161,19 +130,10 @@ class TestValidation(TestCase):
data = val.get_formated_data
self.assertEqual(data, fakes.FORMATED_DATA)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_formated_data_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_formated_data
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('builtins.open')
def test_validation_not_found(self, mock_open):
mock_open.side_effect = IOError()
self.assertRaises(
IOError,
exceptions.ValidationParsingException,
Validation,
'non-existing.yaml'
)
'non-existing.yaml')


@ -13,18 +13,15 @@
# under the License.
#
try:
from unittest import mock
from unittest.mock import ANY
except ImportError:
import mock
from mock import ANY
from unittest import TestCase
from unittest import TestCase, mock
from unittest.mock import ANY
from validations_libs.exceptions import (ValidationRunException,
ValidationShowException,
ValidationParsingException)
from validations_libs.tests import fakes
from validations_libs.validation import Validation
from validations_libs.validation_actions import ValidationActions
from validations_libs.exceptions import ValidationRunException, ValidationShowException
class TestValidationActions(TestCase):
@ -33,9 +30,16 @@ class TestValidationActions(TestCase):
super(TestValidationActions, self).setUp()
self.column_name = ('ID', 'Name', 'Groups', 'Categories', 'Products')
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
return_value=fakes.VALIDATIONS_LIST)
def test_validation_list(self, mock_validation_dir):
@mock.patch('yaml.safe_load')
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.parse_validations')
def test_validation_list(self, mock_validation_dir, mock_open, mock_yaml_load):
mock_yaml_load.side_effect = [[element] for element in fakes.VALIDATIONS_LIST]
mock_validation_dir.return_value = [
Validation('/foo/bar.yaml'),
Validation('/fizz/buzz.yaml')
]
validations_list = ValidationActions('/tmp/foo')
self.assertEqual(validations_list.list_validations(),
@ -50,13 +54,16 @@ class TestValidationActions(TestCase):
['networking'],
['product1'])]))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.os.access', return_value=True)
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
@mock.patch('validations_libs.utils.get_validations_playbook',
@mock.patch('validations_libs.utils.get_validations_playbook_paths',
return_value=['/tmp/foo/fake.yaml'])
def test_validation_skip_validation(self, mock_validation_play, mock_exists, mock_access):
def test_validation_skip_validation(self, mock_validation_play, mock_exists, mock_access,
mock_open, mock_yaml_load):
playbook = ['fake.yaml']
playbook = ['fake']
inventory = 'tmp/inventory.yaml'
skip_list = {'fake': {'hosts': 'ALL',
'reason': None,
@ -70,6 +77,8 @@ class TestValidationActions(TestCase):
limit_hosts=None)
self.assertEqual(run_return, [])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.current_time',
return_value='time')
@mock.patch('validations_libs.utils.uuid.uuid4',
@ -79,16 +88,15 @@ class TestValidationActions(TestCase):
return_value=True)
@mock.patch('validations_libs.utils.os.path.exists',
return_value=True)
@mock.patch('validations_libs.utils.get_validations_playbook',
@mock.patch('validations_libs.utils.get_validations_playbook_paths',
return_value=['/tmp/foo/fake.yaml'])
@mock.patch('validations_libs.ansible.Ansible.run')
def test_validation_skip_on_specific_host(self, mock_ansible_run,
mock_validation_play,
mock_exists,
mock_access,
mock_makedirs,
mock_uuid,
mock_time):
mock_exists, mock_access,
mock_makedirs, mock_uuid,
mock_time, mock_open,
mock_yaml_load):
mock_ansible_run.return_value = ('fake.yaml', 0, 'successful')
run_called_args = {
@ -114,7 +122,7 @@ class TestValidationActions(TestCase):
'validation_cfg_file': None
}
playbook = ['fake.yaml']
validation = ['fake']
inventory = 'tmp/inventory.yaml'
skip_list = {'fake': {'hosts': 'cloud1',
'reason': None,
@ -123,13 +131,15 @@ class TestValidationActions(TestCase):
}
run = ValidationActions(log_path='/var/log/validations')
run_return = run.run_validations(playbook, inventory,
run_return = run.run_validations(validation, inventory,
validations_dir='/tmp/foo',
skip_list=skip_list,
limit_hosts='!cloud1')
mock_ansible_run.assert_called_with(**run_called_args)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.current_time',
return_value='time')
@mock.patch('validations_libs.utils.uuid.uuid4',
@ -139,7 +149,7 @@ class TestValidationActions(TestCase):
return_value=True)
@mock.patch('validations_libs.utils.os.path.exists',
return_value=True)
@mock.patch('validations_libs.utils.get_validations_playbook',
@mock.patch('validations_libs.utils.get_validations_playbook_paths',
return_value=['/tmp/foo/fake.yaml'])
@mock.patch('validations_libs.ansible.Ansible.run')
def test_validation_skip_with_limit_host(self, mock_ansible_run,
@ -148,7 +158,9 @@ class TestValidationActions(TestCase):
mock_access,
mock_makedirs,
mock_uuid,
mock_time):
mock_time,
mock_open,
mock_yaml_load):
mock_ansible_run.return_value = ('fake.yaml', 0, 'successful')
run_called_args = {
@ -174,7 +186,7 @@ class TestValidationActions(TestCase):
'validation_cfg_file': None
}
playbook = ['fake.yaml']
validation = ['fake']
inventory = 'tmp/inventory.yaml'
skip_list = {'fake': {'hosts': 'cloud1',
'reason': None,
@ -183,38 +195,35 @@ class TestValidationActions(TestCase):
}
run = ValidationActions(log_path='/var/log/validations')
run_return = run.run_validations(playbook, inventory,
run_return = run.run_validations(validation, inventory,
validations_dir='/tmp/foo',
skip_list=skip_list,
limit_hosts='cloud,cloud1,!cloud2')
mock_ansible_run.assert_called_with(**run_called_args)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.os.makedirs')
@mock.patch('validations_libs.utils.os.access', return_value=True)
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
@mock.patch('validations_libs.validation_actions.ValidationLogs.get_results',
side_effect=fakes.FAKE_SUCCESS_RUN)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.utils.parse_validations')
@mock.patch('validations_libs.ansible.Ansible.run')
def test_validation_run_success(self, mock_ansible_run,
mock_validation_dir,
mock_results, mock_exists, mock_access,
mock_makedirs):
mock_makedirs, mock_open, mock_safe_load):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {},
'path': '/tmp/foobar/validation-playbooks'}]
mock_validation = Validation('/tmp/foobar/validation-playbooks/foo.yaml')
mock_validation_dir.return_value = [mock_validation]
mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')
expected_run_return = fakes.FAKE_SUCCESS_RUN[0]
playbook = ['fake.yaml']
playbook = ['foo.yaml']
inventory = 'tmp/inventory.yaml'
run = ValidationActions()
@ -246,7 +255,7 @@ class TestValidationActions(TestCase):
validation_cfg_file=None
)
@mock.patch('validations_libs.utils.get_validations_playbook')
@mock.patch('validations_libs.utils.get_validations_playbook_paths')
def test_validation_run_wrong_validation_name(self, mock_validation_play):
mock_validation_play.return_value = []
@ -255,23 +264,19 @@ class TestValidationActions(TestCase):
validation_name=['fake'],
validations_dir='/tmp/foo')
@mock.patch('validations_libs.utils.get_validations_playbook')
def test_validation_run_not_all_found(self, mock_validation_play):
@mock.patch('builtins.open', side_effect=IOError)
@mock.patch('validations_libs.utils.get_validations_playbook_paths')
def test_validation_run_not_all_found(self, mock_validation_play, mock_open):
mock_validation_play.return_value = ['/tmp/foo/fake.yaml']
run = ValidationActions()
try:
run.run_validations(
validation_name=['fake', 'foo'],
validations_dir='/tmp/foo')
except ValidationRunException as run_exception:
self.assertEqual(
"Following validations were not found in '/tmp/foo': foo",
str(run_exception))
else:
self.fail("Runtime error exception should have been raised")
self.assertRaises(
ValidationRunException,
run.run_validations,
validation_name=['fake', 'foo'],
validations_dir='/tmp/foo')
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.utils.parse_validations')
def test_validation_run_not_enough_params(self, mock_validation_play):
mock_validation_play.return_value = []
@ -280,24 +285,22 @@ class TestValidationActions(TestCase):
validations_dir='/tmp/foo'
)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.utils.os.makedirs')
@mock.patch('validations_libs.utils.os.access', return_value=True)
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
@mock.patch('validations_libs.validation_logs.ValidationLogs.get_results')
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.utils.parse_validations')
@mock.patch('validations_libs.ansible.Ansible.run')
def test_validation_run_failed(self, mock_ansible_run,
mock_validation_dir, mock_results,
mock_exists, mock_access,
mock_makedirs):
mock_makedirs, mock_open,
mock_yaml_load):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {},
'path': '/usr/share/ansible/validation-playbooks'}]
mock_validation = Validation('/tmp/foobar/validation-playbooks/foo.yaml')
mock_validation_dir.return_value = [mock_validation]
mock_ansible_run.return_value = ('foo.yaml', 0, 'failed')
@ -326,24 +329,22 @@ class TestValidationActions(TestCase):
validations_dir='/tmp/foo')
self.assertEqual(run_return, expected_run_return)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.ansible.Ansible._playbook_check',
side_effect=RuntimeError)
@mock.patch('validations_libs.utils.os.makedirs')
@mock.patch('validations_libs.utils.os.access', return_value=True)
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.utils.parse_validations')
def test_spinner_exception_failure_condition(self, mock_validation_dir,
mock_exists, mock_access,
mock_makedirs,
mock_playbook_check):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {},
'path': '/usr/share/ansible/validation-playbooks'}]
mock_playbook_check,
mock_open,
mock_yaml_load):
mock_validation = Validation('/tmp/foobar/validation-playbooks/foo.yaml')
mock_validation_dir.return_value = [mock_validation]
playbook = ['fake.yaml']
inventory = 'tmp/inventory.yaml'
@ -353,24 +354,22 @@ class TestValidationActions(TestCase):
inventory, group=fakes.GROUPS_LIST,
validations_dir='/tmp/foo')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
@mock.patch('validations_libs.ansible.Ansible._playbook_check',
side_effect=RuntimeError)
@mock.patch('validations_libs.utils.os.makedirs')
@mock.patch('validations_libs.utils.os.access', return_value=True)
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
@mock.patch('validations_libs.utils.parse_validations')
@mock.patch('sys.__stdin__.isatty', return_value=True)
def test_spinner_forced_run(self, mock_stdin_isatty, mock_validation_dir,
mock_exists, mock_access, mock_makedirs,
mock_playbook_check):
mock_playbook_check, mock_open,
mock_yaml_load):
mock_validation_dir.return_value = [{
'description': 'My Validation One Description',
'groups': ['prep', 'pre-deployment'],
'id': 'foo',
'name': 'My Validition One Name',
'parameters': {},
'path': '/usr/share/ansible/validation-playbooks'}]
mock_validation = Validation('/tmp/foobar/validation-playbooks/foo.yaml')
mock_validation_dir.return_value = [mock_validation]
playbook = ['fake.yaml']
inventory = 'tmp/inventory.yaml'
@ -380,7 +379,7 @@ class TestValidationActions(TestCase):
inventory, group=fakes.GROUPS_LIST,
validations_dir='/tmp/foo')
@mock.patch('validations_libs.utils.get_validations_playbook',
@mock.patch('validations_libs.utils.get_validations_playbook_paths',
return_value=[])
def test_validation_run_no_validation(self, mock_get_val):
playbook = ['fake.yaml']
@ -390,7 +389,7 @@ class TestValidationActions(TestCase):
self.assertRaises(ValidationRunException, run.run_validations, playbook,
inventory)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
@mock.patch('validations_libs.utils.parse_validations',
return_value=fakes.VALIDATIONS_LIST)
@mock.patch('validations_libs.validation.Validation._get_content',
return_value=fakes.FAKE_PLAYBOOK[0])
@ -412,7 +411,7 @@ class TestValidationActions(TestCase):
data.update({'Last execution date': '2019-11-25 13:40:14',
'Number of execution': 'Total: 1, Passed: 0, Failed: 1'})
validations_show = ValidationActions()
out = validations_show.show_validations('512e')
out = validations_show.show_validation('512e')
self.assertEqual(out, data)
@mock.patch('os.path.exists', return_value=False)
@ -420,15 +419,22 @@ class TestValidationActions(TestCase):
validations_show = ValidationActions()
self.assertRaises(
ValidationShowException,
validations_show.show_validations,
validations_show.show_validation,
'512e'
)
@mock.patch('validations_libs.utils.parse_all_validations_on_disk',
return_value=fakes.VALIDATIONS_LIST)
@mock.patch('yaml.safe_load', return_value=fakes.GROUP)
@mock.patch('validations_libs.utils.parse_validations',
autospec=True)
@mock.patch('yaml.safe_load')
@mock.patch('builtins.open')
def test_group_information(self, mock_open, mock_yaml, mock_data):
def test_group_information(self, mock_open, mock_validation_yaml, mock_data):
parsed_yamls = [[element] for element in fakes.VALIDATIONS_LIST]
parsed_yamls.append(fakes.GROUP)
mock_validation_yaml.side_effect = parsed_yamls
mock_data.return_value = [
Validation('/foo/bar.yaml'),
Validation('/fizz/buzz.yaml')
]
v_actions = ValidationActions()
col, values = v_actions.group_information()
self.assertEqual(col, ('Groups', 'Description',
@ -465,7 +471,7 @@ class TestValidationActions(TestCase):
v_actions.show_validations_parameters,
products={'foo': 'bar'})
@mock.patch('validations_libs.utils.get_validations_playbook',
@mock.patch('validations_libs.utils.get_validations_playbook_paths',
return_value=['/foo/playbook/foo.yaml'])
@mock.patch('validations_libs.utils.get_validations_parameters')
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)


@ -31,6 +31,7 @@ from validations_libs import constants
from validations_libs.group import Group
from validations_libs.validation import Validation
from validations_libs.logger import getLogger
from validations_libs import exceptions
LOG = getLogger(__name__ + ".utils")
@ -151,17 +152,21 @@ def create_artifacts_dir(log_path=constants.VALIDATIONS_LOG_BASEDIR,
raise RuntimeError()
def parse_all_validations_on_disk(path,
groups=None,
categories=None,
products=None,
validation_config=None):
"""Return a list of validations metadata which can be sorted by Groups, by
Categories or by Products.
def parse_validations(path,
validation_ids=None,
groups=None,
categories=None,
products=None,
validation_config=None):
"""Return a list of validations metadata dictionaries which
can be sorted by Groups, Categories or Products.
:param path: The absolute path of the validations directory
:type path: `string`
:param validation_ids: Names of requested validations
:type validation_ids: `list`
:param groups: Groups of validations
:type groups: `list`
@ -175,29 +180,24 @@ def parse_all_validations_on_disk(path,
loaded from a validation.cfg file.
:type validation_config: ``dict``
:return: A list of validations metadata.
:rtype: `list`
:return: A list of `Validation` objects
:rtype: ``list``
:Example:
>>> path = '/foo/bar'
>>> parse_all_validations_on_disk(path)
[{'categories': ['storage'],
'products': ['product1'],
'description': 'Detect whether the node disks use Advanced Format.',
'groups': ['prep', 'pre-deployment'],
'id': '512e',
'name': 'Advanced Format 512e Support'},
{'categories': ['system'],
'products': ['product1'],
'description': 'Make sure that the server has enough CPU cores.',
'groups': ['prep', 'pre-introspection'],
'id': 'check-cpu',
'name': 'Verify if the server fits the CPU core requirements'}]
>>> [type(val) for val in parse_all_validations_on_disk(path)]
[<class 'validations_libs.validation.Validation'>,
<class 'validations_libs.validation.Validation'>]
"""
if not isinstance(path, str):
raise TypeError("The 'path' argument must be a String")
if not validation_ids:
validation_ids = []
elif not isinstance(validation_ids, list):
raise TypeError("The 'validation_ids' argument must be a List")
if not groups:
groups = []
elif not isinstance(groups, list):
@ -214,62 +214,63 @@ def parse_all_validations_on_disk(path,
raise TypeError("The 'products' argument must be a List")
results = []
validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
if community_validations_on(validation_config):
validations_abspath.extend(glob.glob("{}/*.yaml".format(
constants.COMMUNITY_PLAYBOOKS_DIR)))
validations_abspaths = get_validations_playbook_paths(
path=path,
validation_config=validation_config)
LOG.debug(
"Attempting to parse validations by:\n"
" - ids: {}\n"
" - groups: {}\n"
" - categories: {}\n"
" - products: {}\n"
"from {}".format(groups, categories, products, validations_abspath)
"from {}".format(validation_ids, groups,
categories, products, validations_abspaths)
)
for playbook in validations_abspath:
val = Validation(playbook)
if not groups and not categories and not products:
results.append(val.get_metadata)
for playbook in validations_abspaths:
try:
val = Validation(playbook)
except exceptions.ValidationParsingException:
LOG.error(
"Attempt to parse playbook at location {} has failed."
"Playbooks is either not a properly formatted validation "
"or a generic ansible playbook.".format(playbook))
continue
# Skip further evaluation and return all if we have nothing to select by.
if not (validation_ids or groups or categories or products):
results.append(val)
continue
# elif keeps a validation from being listed more than once
# when it matches several of the requested filters.
if val.id in validation_ids:
results.append(val)
elif set(groups).intersection(val.groups):
results.append(val)
elif set(categories).intersection(val.categories):
results.append(val)
elif set(products).intersection(val.products):
results.append(val)
if set(groups).intersection(val.groups) or \
set(categories).intersection(val.categories) or \
set(products).intersection(val.products):
results.append(val.get_metadata)
LOG.debug("Retrieved {v_count} validations in pre-defined paths: {paths}".format(
v_count=len(results),
paths=':'.join(constants.VALIDATION_PLAYBOOK_DIRS)))
return results
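A minimal usage sketch of the reworked helper; the playbook directory and filter values below are illustrative assumptions rather than values taken from this change:

from validations_libs import utils

# Parse every discoverable playbook and keep only those tagged with the
# requested group and product; each result is a Validation object.
validations = utils.parse_validations(
    path='/usr/share/ansible/validation-playbooks',
    groups=['prep'],
    products=['product1'])
for val in validations:
    print(val.id, val.metadata.get('name'))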
def get_validations_playbook(path,
validation_id=None,
groups=None,
categories=None,
products=None,
validation_config=None):
"""Get a list of validations playbooks paths either by their names,
their groups, by their categories or by their products.
def get_validations_playbook_paths(path,
validation_config=None):
"""Get a list of validations playbooks paths.
:param path: Path of the validations playbooks
:type path: `string`
:param validation_id: List of validation name
:type validation_id: `list`
:param groups: List of validation group
:type groups: `list`
:param categories: List of validation category
:type categories: `list`
:param products: List of validation product
:type products: `list`
:param validation_config: A dictionary of configuration for Validation
loaded from a validation.cfg file.
:type validation_config: ``dict``
:return: A list of absolute validations playbooks path
:return: A list of absolute paths to validation playbooks
:rtype: `list`
:Example:
@ -290,49 +291,18 @@ def get_validations_playbook(path,
if not isinstance(path, str):
raise TypeError("The 'path' argument must be a String")
if not validation_id:
validation_id = []
elif not isinstance(validation_id, list):
raise TypeError("The 'validation_id' argument must be a List")
if not groups:
groups = []
elif not isinstance(groups, list):
raise TypeError("The 'groups' argument must be a List")
if not categories:
categories = []
elif not isinstance(categories, list):
raise TypeError("The 'categories' argument must be a List")
if not products:
products = []
elif not isinstance(products, list):
raise TypeError("The 'products' argument must be a List")
pl = []
validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
if community_validations_on(validation_config):
validations_abspath.extend(glob.glob("{}/*.yaml".format(
constants.COMMUNITY_PLAYBOOKS_DIR)))
for pl_path in validations_abspath:
if os.path.isfile(pl_path):
if validation_id:
if os.path.splitext(os.path.basename(pl_path))[0] in validation_id or \
os.path.basename(pl_path) in validation_id:
pl.append(pl_path)
constants.COMMUNITY_PLAYBOOKS_DIR)))
val = Validation(pl_path)
if groups:
if set(groups).intersection(val.groups):
pl.append(pl_path)
if categories:
if set(categories).intersection(val.categories):
pl.append(pl_path)
if products:
if set(products).intersection(val.products):
pl.append(pl_path)
return pl
for possible_path in constants.VALIDATION_PLAYBOOK_DIRS:
validations_abspath.extend(glob.glob("{}/*.yaml".format(possible_path)))
# Filter out any non-file paths that somehow got in
validations_abspath = [path for path in validations_abspath if os.path.isfile(path)]
return validations_abspath
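A short sketch of how the path collection could be exercised on its own; the directory is an assumption used only for illustration:

from validations_libs import utils

# Gather candidate playbook paths from the given directory plus the
# library's pre-defined locations; non-file entries are filtered out.
playbook_paths = utils.get_validations_playbook_paths(
    path='/usr/share/ansible/validation-playbooks')
print("{} playbooks discovered".format(len(playbook_paths)))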
def get_validation_parameters(validation):
@ -439,75 +409,33 @@ def get_validations_data(
return data
def get_validations_parameters(validations_data,
validation_name=None,
groups=None,
categories=None,
products=None):
def get_validations_parameters(validations_data):
"""Return parameters for a list of validations
:param validations_data: A list of absolute validations playbooks path
:param validations_data: A list of `Validation` objects
:type validations_data: `list`
:param validation_name: A list of validation name
:type validation_name: `list`
:param groups: A list of validation groups
:type groups: `list`
:param categories: A list of validation categories
:type categories: `list`
:param products: A list of validation products
:type products: `list`
:return: a dictionary containing the current parameters for
each `validation_name` or `groups`
each `Validation` object
:rtype: `dict`
:Example:
>>> validations_data = ['/foo/bar/check-ram.yaml',
'/foo/bar/check-cpu.yaml']
>>> validation_name = ['check-ram', 'check-cpu']
>>> get_validations_parameters(validations_data, validation_name)
>>> validations_data = [Validation('/foo/bar/check-ram.yaml'),
Validation('/foo/bar/check-cpu.yaml')]
>>> get_validations_parameters(validations_data)
{'check-cpu': {'parameters': {'minimal_cpu_count': 8}},
'check-ram': {'parameters': {'minimal_ram_gb': 24}}}
"""
if not isinstance(validations_data, list):
raise TypeError("The 'validations_data' argument must be a List")
if not validation_name:
validation_name = []
elif not isinstance(validation_name, list):
raise TypeError("The 'validation_name' argument must be a List")
if not groups:
groups = []
elif not isinstance(groups, list):
raise TypeError("The 'groups' argument must be a List")
if not categories:
categories = []
elif not isinstance(categories, list):
raise TypeError("The 'categories' argument must be a List")
if not products:
products = []
elif not isinstance(products, list):
raise TypeError("The 'products' argument must be a List")
params = {}
for val in validations_data:
v = Validation(val)
if v.id in validation_name or \
set(groups).intersection(v.groups) or \
set(categories).intersection(v.categories) or \
set(products).intersection(v.products):
params[v.id] = {
'parameters': v.get_vars
}
params[val.id] = {
'parameters': val.get_vars
}
return params
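Since the selection arguments are gone, a caller is expected to pre-filter with parse_validations() and pass the resulting objects in; a hedged sketch with made-up validation names:

from validations_libs import utils

# Resolve the requested validations first, then extract their variables.
selected = utils.parse_validations(
    path='/usr/share/ansible/validation-playbooks',
    validation_ids=['check-cpu', 'check-ram'])
params = utils.get_validations_parameters(validations_data=selected)
print(params)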

View File

@ -14,6 +14,7 @@
#
from validations_libs.logger import getLogger
import validations_libs.exceptions as exceptions
import os
import yaml
from collections import OrderedDict
@ -88,15 +89,21 @@ class Validation:
def __init__(self, validation_path):
self.dict = self._get_content(validation_path)
if not self.has_metadata_dict:
raise exceptions.ValidationParsingException(
"No metadata found in validation {}".format(validation_path))
self.id = os.path.splitext(os.path.basename(validation_path))[0]
self.path = os.path.dirname(validation_path)
self.path = validation_path
self.metadata = {'id': self.id, 'path': self.path}
self.metadata.update(self.dict['vars'].get('metadata'))
def _get_content(self, val_path):
try:
with open(val_path, 'r') as val_playbook:
return yaml.safe_load(val_playbook)[0]
except IOError:
raise IOError("Validation playbook not found")
except (IOError, KeyError, IndexError, TypeError, yaml.YAMLError) as ex:
raise exceptions.ValidationParsingException(
"Error while parsing validation playbook: {}".format(ex))
@property
def has_vars_dict(self):
@ -163,9 +170,7 @@ class Validation:
"""Get the metadata of a validation
:return: The validation metadata
:rtype: `dict` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `dict`
:Example:
@ -178,25 +183,16 @@ class Validation:
'products': ['product1', 'product2'],
'id': 'val1',
'name': 'The validation val1\'s name',
'path': '/tmp/foo/'}
'path': '/tmp/foo/val1.yaml'}
"""
if self.has_metadata_dict:
self.metadata = {'id': self.id, 'path': self.path}
self.metadata.update(self.dict['vars'].get('metadata'))
return self.metadata
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.metadata
@property
def get_vars(self):
"""Get only the variables of a validation
:return: All the variables belonging to a validation
:rtype: `dict` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:return: All the variables belonging to a validation apart from metadata
:rtype: `dict`
:Example:
@ -206,14 +202,9 @@ class Validation:
{'var_name1': 'value1',
'var_name2': 'value2'}
"""
if self.has_metadata_dict:
validation_vars = self.dict['vars'].copy()
validation_vars.pop('metadata')
return validation_vars
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
validation_vars = self.dict['vars'].copy()
validation_vars.pop('metadata')
return validation_vars
@property
def get_data(self):
@ -244,9 +235,7 @@ class Validation:
"""Get the validation list of groups
:return: A list of groups for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -255,21 +244,14 @@ class Validation:
>>> print(val.groups)
['group1', 'group2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('groups', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('groups', [])
@property
def categories(self):
"""Get the validation list of categories
:return: A list of categories for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -278,21 +260,14 @@ class Validation:
>>> print(val.categories)
['category1', 'category2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('categories', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('categories', [])
@property
def products(self):
"""Get the validation list of products
:return: A list of products for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -301,12 +276,7 @@ class Validation:
>>> print(val.products)
['product1', 'product2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('products', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('products', [])
@property
def get_id(self):
@ -342,8 +312,6 @@ class Validation:
the list of `Categories`, the list of `Groups`, the `ID` and
the `Name`.
:rtype: `dict`
:raise: A `NameError` exception if no metadata has been found in the
playbook
:Example:
@ -359,12 +327,11 @@ class Validation:
'path': '/tmp/foo/'}
"""
data = {}
metadata = self.get_metadata
for key in metadata:
for key in self.metadata:
if key == 'id':
data[key.upper()] = metadata.get(key)
data[key.upper()] = self.metadata.get(key)
else:
data[key.capitalize()] = metadata.get(key)
data[key.capitalize()] = self.metadata.get(key)
return data
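The loop above only re-labels the stored metadata keys for display; roughly equivalent as a standalone snippet:

metadata = {'id': 'check-cpu', 'name': 'Verify CPU count', 'groups': ['prep']}
formatted = {}
for key, value in metadata.items():
    # 'id' becomes 'ID'; every other key is capitalized for the table view.
    formatted[key.upper() if key == 'id' else key.capitalize()] = value
print(formatted)  # {'ID': 'check-cpu', 'Name': 'Verify CPU count', 'Groups': ['prep']}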

View File

@ -12,19 +12,21 @@
# License for the specific language governing permissions and limitations
# under the License.
#
from validations_libs.logger import getLogger
import json
import os
import sys
import json
import yaml
from validations_libs.ansible import Ansible as v_ansible
from validations_libs.group import Group
from validations_libs.cli.common import Spinner
from validations_libs.validation_logs import ValidationLogs, ValidationLog
from validations_libs import constants
from validations_libs import utils as v_utils
from validations_libs.exceptions import ValidationRunException, ValidationShowException
from validations_libs.ansible import Ansible as v_ansible
from validations_libs.cli.common import Spinner
from validations_libs.exceptions import (ValidationRunException,
ValidationShowException)
from validations_libs.group import Group
from validations_libs.logger import getLogger
from validations_libs.validation_logs import ValidationLog, ValidationLogs
LOG = getLogger(__name__ + ".validation_actions")
@ -43,7 +45,7 @@ class ValidationActions:
"""
def __init__(self, validation_path=constants.ANSIBLE_VALIDATION_DIR,
def __init__(self, base_validation_path=constants.ANSIBLE_VALIDATION_DIR,
groups_path=constants.VALIDATION_GROUPS_INFO,
log_path=constants.VALIDATIONS_LOG_BASEDIR):
"""
@ -56,7 +58,7 @@ class ValidationActions:
:type log_path: ``string``
"""
self.log = getLogger(__name__ + ".ValidationActions")
self.validation_path = validation_path
self.base_validation_path = base_validation_path
self.log_path = log_path
self.groups_path = groups_path
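Callers now construct the class with the renamed keyword; a minimal sketch assuming the default playbook location:

from validations_libs.validation_actions import ValidationActions

# base_validation_path replaces the former validation_path keyword.
actions = ValidationActions(
    base_validation_path='/usr/share/ansible/validation-playbooks')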
@ -121,8 +123,8 @@ class ValidationActions:
"""
self.log = getLogger(__name__ + ".list_validations")
validations = v_utils.parse_all_validations_on_disk(
path=self.validation_path,
validations = v_utils.parse_validations(
path=self.base_validation_path,
groups=groups,
categories=categories,
products=products,
@ -134,17 +136,17 @@ class ValidationActions:
)
return_values = [
(val.get('id'), val.get('name'),
val.get('groups'), val.get('categories'),
val.get('products'))
(val.metadata.get('id'), val.metadata.get('name'),
val.metadata.get('groups'), val.metadata.get('categories'),
val.metadata.get('products'))
for val in validations]
column_names = ('ID', 'Name', 'Groups', 'Categories', 'Products')
return (column_names, return_values)
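The tuple returned above maps directly onto a table; a small, self-contained sketch where the directory and group are assumptions:

from validations_libs.validation_actions import ValidationActions

actions = ValidationActions(
    base_validation_path='/usr/share/ansible/validation-playbooks')
column_names, rows = actions.list_validations(groups=['prep'])
print(" | ".join(column_names))
for row in rows:
    # Each row carries (ID, Name, Groups, Categories, Products).
    print(" | ".join(str(field) for field in row))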
def show_validations(self, validation,
validation_config=None):
def show_validation(self, validation,
validation_config=None):
"""Display detailed information about a Validation
:param validation: The name of the validation
@ -153,7 +155,6 @@ class ValidationActions:
loaded from a validation.cfg file.
:type validation_config: ``dict``
:return: The detailed information for a validation
:rtype: `dict`
@ -164,7 +165,7 @@ class ValidationActions:
>>> path = "/foo/bar"
>>> validation = 'foo'
>>> action = ValidationActions(base_validation_path=path)
>>> results = action.show_validations(validation=validation)
>>> results = action.show_validation(validation=validation)
>>> print(results)
{
'Description': 'Description of the foo validation',
@ -177,21 +178,20 @@ class ValidationActions:
'Parameters': {'foo1': bar1}
}
"""
self.log = getLogger(__name__ + ".show_validations")
self.log = getLogger(__name__ + ".show_validation")
# Get validation data:
vlog = ValidationLogs(self.log_path)
data = v_utils.get_validations_data(
validation,
self.validation_path,
self.base_validation_path,
validation_config=validation_config)
if not data:
extra_msg = ""
searched_paths = constants.VALIDATION_PLAYBOOK_DIRS.copy()
if v_utils.community_validations_on(validation_config):
extra_msg = " or {}".format(constants.COMMUNITY_LIBRARY_DIR)
msg = "Validation {} not found in the path: {}{}".format(
searched_paths.append(str(constants.COMMUNITY_PLAYBOOKS_DIR))
msg = "Validation {} not found in paths: {}".format(
validation,
self.validation_path,
extra_msg)
", ".join(searched_paths))
raise ValidationShowException(msg)
logfiles = vlog.get_logfile_content_by_validation(validation)
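A hedged sketch of the caller side, showing the richer not-found message being surfaced; the validation name and directory are made up:

from validations_libs.exceptions import ValidationShowException
from validations_libs.validation_actions import ValidationActions

actions = ValidationActions(
    base_validation_path='/usr/share/ansible/validation-playbooks')
try:
    details = actions.show_validation('check-cpu')
except ValidationShowException as err:
    # The message now lists every directory that was searched.
    print(err)
else:
    print(details)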
@ -319,7 +319,7 @@ class ValidationActions:
extra_vars=None, validations_dir=None,
extra_env_vars=None, ansible_cfg=None, quiet=True,
limit_hosts=None, run_async=False,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
base_dir=constants.DEFAULT_ANSIBLE_BASEDIR,
python_interpreter=None, skip_list=None,
callback_whitelist=None,
output_callback='vf_validation_stdout', ssh_user=None,
@ -418,48 +418,44 @@ class ValidationActions:
self.log = getLogger(__name__ + ".run_validations")
playbooks = []
validations_dir = (validations_dir if validations_dir
else self.validation_path)
if group or category or product:
self.log.debug(
"Getting the validations list by:\n"
" - groups: {}\n"
" - categories: {}\n"
" - products: {}".format(group, category, product)
)
validations = v_utils.parse_all_validations_on_disk(
path=validations_dir, groups=group,
categories=category, products=product,
validation_config=validation_config
)
for val in validations:
playbooks.append("{path}/{id}.yaml".format(**val))
elif validation_name:
else self.base_validation_path)
self.log.debug(
"Getting the validations list by:\n"
" - ids: {}\n"
" - groups: {}\n"
" - categories: {}\n"
" - products: {}".format(validation_name, group, category, product)
)
validations = v_utils.parse_validations(
path=validations_dir, validation_ids=validation_name,
groups=group,
categories=category, products=product,
validation_config=validation_config)
if validation_name:
self.log.debug(
"Getting the {} validation.".format(
validation_name))
playbooks = v_utils.get_validations_playbook(
validations_dir,
validation_name,
validation_config=validation_config)
if not validations or len(validation_name) != len(validations):
found_validations = []
for val in validations:
found_validations.append(val.id)
if not playbooks or len(validation_name) != len(playbooks):
found_playbooks = []
for play in playbooks:
found_playbooks.append(
os.path.basename(os.path.splitext(play)[0]))
unknown_validations = list(
set(validation_name) - set(found_playbooks))
missing_validations = list(
set(validation_name) - set(found_validations))
msg = (
"Following validations were not found in '{}': {}"
).format(validations_dir, ', '.join(unknown_validations))
).format(validations_dir, ', '.join(missing_validations))
raise ValidationRunException(msg)
else:
if len(validations) == 0:
raise ValidationRunException("No validations found")
for val in validations:
playbooks.append("{path}".format(path=val.path))
log_path = v_utils.create_log_dir(self.log_path)
self.log.debug((
@ -589,15 +585,15 @@ class ValidationActions:
group_info = []
validations = v_utils.parse_all_validations_on_disk(
path=self.validation_path,
validations = v_utils.parse_validations(
path=self.base_validation_path,
groups=[group[0] for group in group_definitions],
validation_config=validation_config)
# Get validations number by group
for group in group_definitions:
n_matches = len(
[val for val in validations if group[0] in val['groups']])
[val for val in validations if group[0] in val.metadata['groups']])
group_info.append((
group[0],
group[1],
@ -675,22 +671,16 @@ class ValidationActions:
if output_format not in supported_format:
raise ValidationShowException("{} output format not supported".format(output_format))
validation_playbooks = v_utils.get_validations_playbook(
path=self.validation_path,
validation_id=validations,
validations = v_utils.parse_validations(
path=self.base_validation_path,
validation_ids=validations,
groups=groups,
categories=categories,
products=products,
validation_config=validation_config
)
params = v_utils.get_validations_parameters(
validations_data=validation_playbooks,
validation_name=validations,
groups=groups,
categories=categories,
products=products
)
params = v_utils.get_validations_parameters(validations_data=validations)
if download_file:
params_only = {}