DNM First steps towards new validations

Signed-off-by: Jiri Podivin <jpodivin@redhat.com>
Change-Id: Id7eaebc7bd9a4a80d09f9c98ca6fb58173488003
Jiri Podivin 2023-01-27 16:47:17 +01:00
parent 430530fad5
commit bb579a1c63
13 changed files with 114 additions and 169 deletions

View File

@ -320,7 +320,7 @@ class Ansible:
def run(self, playbook, inventory, workdir, playbook_dir=None,
connection='smart', output_callback=None,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
base_dir=constants.DEFAULT_ANSIBLE_BASEDIR,
ssh_user=None, key=None, module_path=None,
limit_hosts=None, tags=None, skip_tags=None,
verbosity=0, quiet=False, extra_vars=None,
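A minimal call sketch of the runner with the renamed default, using only keyword names visible in the signature above; the playbook, inventory and workdir values are illustrative, and the no-argument Ansible() constructor is an assumption.

from validations_libs import constants
from validations_libs.ansible import Ansible

runner = Ansible()  # assumed no-argument construction
runner.run(
    playbook='check-ram.yaml',       # illustrative playbook name
    inventory='localhost,',
    workdir='/tmp/validations',
    base_dir=constants.DEFAULT_ANSIBLE_BASEDIR)  # same '/usr/share/ansible', new constant name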

View File

@ -56,7 +56,7 @@ class CommunityValidationInit(BaseCommand):
"is located."))
parser.add_argument('--ansible-base-dir', dest='ansible_base_dir',
default=constants.DEFAULT_VALIDATIONS_BASEDIR,
default=constants.DEFAULT_ANSIBLE_BASEDIR,
help=("Path where the ansible roles, library "
"and plugins are located."))
return parser

View File

@ -58,7 +58,7 @@ class ValidationList(BaseLister):
validation_dir = parsed_args.validation_dir
group = parsed_args.group
v_actions = ValidationActions(validation_path=validation_dir)
v_actions = ValidationActions(base_validation_path=validation_dir)
return (v_actions.list_validations(groups=group,
categories=category,
products=product,
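A minimal sketch of the renamed keyword as this lister now passes it, assuming the filter arguments keep the defaults implied by the call above; the path and filter values are illustrative.

from validations_libs.validation_actions import ValidationActions

v_actions = ValidationActions(
    base_validation_path='/usr/share/ansible/validation-playbooks')
listing = v_actions.list_validations(
    groups=['prep'], categories=['os'], products=['product1'])
print(listing)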

View File

@ -52,7 +52,7 @@ class Run(BaseCommand):
help=cli_constants.PLAY_PATH_DESC)
parser.add_argument('--ansible-base-dir', dest='ansible_base_dir',
default=constants.DEFAULT_VALIDATIONS_BASEDIR,
default=constants.DEFAULT_ANSIBLE_BASEDIR,
help=("Path where the ansible roles, library "
"and plugins are located.\n"))

View File

@ -44,7 +44,7 @@ class Show(BaseShow):
validation_dir = parsed_args.validation_dir
validation_name = parsed_args.validation_name
v_actions = ValidationActions(validation_path=validation_dir)
v_actions = ValidationActions(base_validation_path=validation_dir)
data = v_actions.show_validations(
validation_name, validation_config=self.base.config)

View File

@ -15,16 +15,12 @@
# under the License.
#
from validations_libs.logger import getLogger
import re
import os
# @matbu backward compatibility for stable/train
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
import re
from pathlib import Path
from validations_libs import constants, utils
from validations_libs.logger import getLogger
LOG = getLogger(__name__)
@ -40,7 +36,7 @@ class CommunityValidation:
self,
validation_name,
validation_dir=constants.ANSIBLE_VALIDATION_DIR,
ansible_base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR):
ansible_base_dir=constants.DEFAULT_ANSIBLE_BASEDIR):
"""Construct Role and Playbook."""
self._validation_name = validation_name

View File

@ -22,25 +22,29 @@ or as a fallback, when custom locations fail.
import os
# @matbu backward compatibility for stable/train
try:
from pathlib import Path
except ImportError:
from pathlib2 import Path
from pathlib import Path
DEFAULT_VALIDATIONS_BASEDIR = '/usr/share/ansible'
DEFAULT_ANSIBLE_BASEDIR = '/usr/share/ansible'
ANSIBLE_VALIDATION_DIR = os.path.join(
DEFAULT_VALIDATIONS_BASEDIR,
DEFAULT_ANSIBLE_BASEDIR,
'validation-playbooks')
ANSIBLE_ROLES_DIR = Path.joinpath(Path(DEFAULT_VALIDATIONS_BASEDIR),
ANSIBLE_ROLES_DIR = Path.joinpath(Path(DEFAULT_ANSIBLE_BASEDIR),
'roles')
VALIDATION_GROUPS_INFO = os.path.join(
DEFAULT_VALIDATIONS_BASEDIR,
DEFAULT_ANSIBLE_BASEDIR,
'groups.yaml')
COLLECTION_VALIDATIONS_PATH = 'collections/ansible-collections/validations/*/playbooks/'
VALIDATION_PLAYBOOK_DIRS = [
ANSIBLE_VALIDATION_DIR,
os.path.join(os.path.expanduser('~'), COLLECTION_VALIDATIONS_PATH),
os.path.join(DEFAULT_ANSIBLE_BASEDIR, COLLECTION_VALIDATIONS_PATH)
]
# NOTE(fressi) The HOME folder environment variable may be undefined.
VALIDATIONS_LOG_BASEDIR = os.path.expanduser('~/validations')
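A rough illustration of what the new candidate list expands to; the home directory shown assumes a hypothetical 'stack' user, and the actual values depend on $HOME.

from validations_libs import constants

for candidate in constants.VALIDATION_PLAYBOOK_DIRS:
    print(candidate)
# /usr/share/ansible/validation-playbooks
# /home/stack/collections/ansible-collections/validations/*/playbooks/
# /usr/share/ansible/collections/ansible-collections/validations/*/playbooks/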

View File

@ -39,3 +39,13 @@ class ValidationShowException(Exception):
of the `ValidationActions` class, cause unacceptable behavior
from which it is impossible to recover.
"""
class ValidationParsingException(Exception):
"""ValidationParsingException is to be raised when playbook
retrieved from storage and parsed in the `__init__` method
of the `Validation` object is malformed, or incompatible with
the requirements of validation runtime.
The exception should be raised as soon as possible after parsing
of the file, in order to ensure the fastest report and recovery.
"""

View File

@ -16,23 +16,10 @@
import logging
import os
import subprocess
from pathlib import PosixPath
from unittest import TestCase, mock
try:
from unittest import mock
except ImportError:
import mock
# @matbu backward compatibility for stable/train
try:
from pathlib import PosixPath
PATHLIB = 'pathlib'
except ImportError:
from pathlib2 import PosixPath
PATHLIB = 'pathlib2'
from unittest import TestCase
from validations_libs import utils, constants
from validations_libs import constants, utils
from validations_libs.tests import fakes
@ -41,6 +28,16 @@ class TestUtils(TestCase):
def setUp(self):
super(TestUtils, self).setUp()
self.logger = mock.patch('validations_libs.logger.getLogger')
# Mocking all glob calls, so that only the first path will
# return any items
globs = [['/foo/playbook/foo.yaml'], []]
globs.extend(len(constants.VALIDATION_PLAYBOOK_DIRS)*[[]])
def _return_empty_list(_=None):
while True:
yield []
self.globs_first_only = globs
self.return_empty_list = _return_empty_list
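A standalone sketch of the side_effect pattern prepared above, relying on standard unittest.mock semantics: an iterable side_effect yields one return value per call, so only the first glob.glob call finds a playbook and every later call finds none.

from unittest import mock

with mock.patch('glob.glob') as mock_glob:
    mock_glob.side_effect = [['/foo/playbook/foo.yaml'], [], [], []]
    print(mock_glob('/foo/playbook/*.yaml'))  # ['/foo/playbook/foo.yaml']
    print(mock_glob('/elsewhere/*.yaml'))     # []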
@mock.patch('validations_libs.validation.Validation._get_content',
return_value=fakes.FAKE_PLAYBOOK[0])
@ -106,8 +103,8 @@ class TestUtils(TestCase):
@mock.patch('glob.glob')
def test_parse_all_validations_on_disk(self, mock_glob, mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_all_validations_on_disk('/foo/playbook')
self.assertEqual(result, [fakes.FAKE_METADATA])
@ -116,8 +113,8 @@ class TestUtils(TestCase):
@mock.patch('glob.glob')
def test_parse_community_validations_on_disk(
self, mock_glob, mock_open, mock_load):
mock_glob.side_effect = \
([], ['/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_all_validations_on_disk('/foo/playbook')
self.assertEqual(result, [fakes.FAKE_METADATA])
@ -126,8 +123,7 @@ class TestUtils(TestCase):
@mock.patch('glob.glob')
def test_parse_all_community_disabled_validations_on_disk(
self, mock_glob, mock_open, mock_load):
mock_glob.side_effect = \
([], ['/foo/playbook/foo.yaml'])
mock_glob.side_effect = self.return_empty_list()
result = utils.parse_all_validations_on_disk(
'/foo/playbook',
validation_config={'default': {"enable_community_validations": False}})
@ -186,8 +182,7 @@ class TestUtils(TestCase):
def test_parse_all_validations_on_disk_by_group(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_all_validations_on_disk('/foo/playbook',
['prep'])
self.assertEqual(result, [fakes.FAKE_METADATA])
@ -198,8 +193,7 @@ class TestUtils(TestCase):
def test_parse_all_validations_on_disk_by_category(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = \
(['/foo/playbook/foo.yaml'], [])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_all_validations_on_disk('/foo/playbook',
categories=['os'])
self.assertEqual(result, [fakes.FAKE_METADATA])
@ -215,7 +209,7 @@ class TestUtils(TestCase):
def test_parse_all_validations_on_disk_by_product(self, mock_glob,
mock_open,
mock_load):
mock_glob.side_effect = (['/foo/playbook/foo.yaml'], [])
mock_glob.side_effect = self.globs_first_only
result = utils.parse_all_validations_on_disk('/foo/playbook',
products=['product1'])
self.assertEqual(result, [fakes.FAKE_METADATA])
@ -521,13 +515,13 @@ class TestUtils(TestCase):
results['ansible_environment']['ANSIBLE_STDOUT_CALLBACK'],
fakes.ANSIBLE_ENVIRONNMENT_CONFIG['ANSIBLE_STDOUT_CALLBACK'])
@mock.patch('{}.Path.exists'.format(PATHLIB),
@mock.patch('pathlib.Path.exists',
return_value=False)
@mock.patch('{}.Path.is_dir'.format(PATHLIB),
@mock.patch('pathlib.Path.is_dir',
return_value=False)
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.iterdir',
return_value=iter([]))
@mock.patch('{}.Path.mkdir'.format(PATHLIB))
@mock.patch('pathlib.Path.mkdir')
def test_check_creation_community_validations_dir(self, mock_mkdir,
mock_iterdir,
mock_isdir,
@ -543,11 +537,11 @@ class TestUtils(TestCase):
PosixPath("/foo/bar/community-validations/lookup_plugins")]
)
@mock.patch('{}.Path.is_dir'.format(PATHLIB), return_value=True)
@mock.patch('{}.Path.exists'.format(PATHLIB), return_value=True)
@mock.patch('{}.Path.iterdir'.format(PATHLIB),
@mock.patch('pathlib.Path.is_dir', return_value=True)
@mock.patch('pathlib.Path.exists', return_value=True)
@mock.patch('pathlib.Path.iterdir',
return_value=fakes.FAKE_COVAL_MISSING_SUBDIR_ITERDIR1)
@mock.patch('{}.Path.mkdir'.format(PATHLIB))
@mock.patch('pathlib.Path.mkdir')
def test_check_community_validations_dir_with_missing_subdir(self,
mock_mkdir,
mock_iterdir,

View File

@ -21,6 +21,7 @@ from unittest import TestCase
from validations_libs.validation import Validation
from validations_libs.tests import fakes
from validations_libs import exceptions
class TestValidation(TestCase):
@ -44,10 +45,10 @@ class TestValidation(TestCase):
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_metadata_wrong_playbook(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_metadata
self.assertEqual('No metadata found in validation foo',
def test_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(exceptions.ValidationParsingException) as exc_mgr:
Validation('/tmp/foo')
self.assertEqual('No metadata found in validation /tmp/foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
@ -64,14 +65,6 @@ class TestValidation(TestCase):
data = val.get_vars
self.assertEqual(data, {})
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_vars_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_vars
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_id(self, mock_open, mock_yaml):
@ -88,14 +81,6 @@ class TestValidation(TestCase):
groups = val.groups
self.assertEqual(groups, ['prep', 'pre-deployment'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_groups_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').groups
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_groups_with_no_existing_groups(self, mock_open, mock_yaml):
@ -110,14 +95,6 @@ class TestValidation(TestCase):
categories = val.categories
self.assertEqual(categories, ['os', 'storage'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_categories_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').categories
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_categories_with_no_existing_categories(self, mock_open, mock_yaml):
@ -132,14 +109,6 @@ class TestValidation(TestCase):
products = val.products
self.assertEqual(products, ['product1'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_products_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').products
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK3)
@mock.patch('builtins.open')
def test_products_with_no_existing_products(self, mock_open, mock_yaml):
@ -161,14 +130,6 @@ class TestValidation(TestCase):
data = val.get_formated_data
self.assertEqual(data, fakes.FORMATED_DATA)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('builtins.open')
def test_get_formated_data_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
Validation('/tmp/foo').get_formated_data
self.assertEqual('No metadata found in validation foo',
str(exc_mgr.exception))
@mock.patch('builtins.open')
def test_validation_not_found(self, mock_open):
mock_open.side_effect = IOError()
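A hedged sketch of why the per-property NameError tests above could be dropped: after this commit a Validation either constructs successfully or raises ValidationParsingException up front, so groups, categories and products can be read without further guarding. describe is a hypothetical helper; the path argument is whatever playbook the caller supplies.

from validations_libs import exceptions
from validations_libs.validation import Validation

def describe(playbook_path):
    try:
        val = Validation(playbook_path)
    except exceptions.ValidationParsingException:
        return None  # not a validation playbook
    # Construction succeeded, so the metadata block is guaranteed to exist.
    return {'groups': val.groups,
            'categories': val.categories,
            'products': val.products}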

View File

@ -31,6 +31,7 @@ from validations_libs import constants
from validations_libs.group import Group
from validations_libs.validation import Validation
from validations_libs.logger import getLogger
from validations_libs import exceptions
LOG = getLogger(__name__ + ".utils")
@ -219,6 +220,9 @@ def parse_all_validations_on_disk(path,
validations_abspath.extend(glob.glob("{}/*.yaml".format(
constants.COMMUNITY_PLAYBOOKS_DIR)))
for possible_path in constants.VALIDATION_PLAYBOOK_DIRS:
validations_abspath.extend(glob.glob("{}/*.yaml".format(possible_path)))
LOG.debug(
"Attempting to parse validations by:\n"
" - groups: {}\n"
@ -227,7 +231,14 @@ def parse_all_validations_on_disk(path,
"from {}".format(groups, categories, products, validations_abspath)
)
for playbook in validations_abspath:
val = Validation(playbook)
try:
val = Validation(playbook)
except exceptions.ValidationParsingException:
LOG.error(
"Attempt to parse playbook at location {} has failed."
"Playbooks is either not a properly formatted validation "
"or a generic ansible playbook.".format(playbook))
continue
if not groups and not categories and not products:
results.append(val.get_metadata)
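A condensed, standalone sketch of the skip-on-parse-failure flow added above; the directory scan and logging are simplified, and collect_metadata is a hypothetical helper rather than part of the library API.

import glob

from validations_libs import constants, exceptions
from validations_libs.logger import getLogger
from validations_libs.validation import Validation

LOG = getLogger(__name__)

def collect_metadata():
    results = []
    for possible_path in constants.VALIDATION_PLAYBOOK_DIRS:
        for playbook in glob.glob("{}/*.yaml".format(possible_path)):
            try:
                val = Validation(playbook)
            except exceptions.ValidationParsingException:
                LOG.error("Skipping %s: not a properly formatted validation.", playbook)
                continue
            results.append(val.get_metadata)
    return results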

View File

@ -14,6 +14,7 @@
#
from validations_libs.logger import getLogger
import validations_libs.exceptions as exceptions
import os
import yaml
from collections import OrderedDict
@ -88,6 +89,9 @@ class Validation:
def __init__(self, validation_path):
self.dict = self._get_content(validation_path)
if not self.has_metadata_dict:
raise exceptions.ValidationParsingException(
"No metadata found in validation {}".format(validation_path))
self.id = os.path.splitext(os.path.basename(validation_path))[0]
self.path = os.path.dirname(validation_path)
@ -163,9 +167,7 @@ class Validation:
"""Get the metadata of a validation
:return: The validation metadata
:rtype: `dict` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `dict`
:Example:
@ -180,23 +182,16 @@ class Validation:
'name': 'The validation val1\'s name',
'path': '/tmp/foo/'}
"""
if self.has_metadata_dict:
self.metadata = {'id': self.id, 'path': self.path}
self.metadata.update(self.dict['vars'].get('metadata'))
return self.metadata
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
self.metadata = {'id': self.id, 'path': self.path}
self.metadata.update(self.dict['vars'].get('metadata'))
return self.metadata
@property
def get_vars(self):
"""Get only the variables of a validation
:return: All the variables belonging to a validation
:rtype: `dict` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `dict`
:Example:
@ -206,14 +201,9 @@ class Validation:
{'var_name1': 'value1',
'var_name2': 'value2'}
"""
if self.has_metadata_dict:
validation_vars = self.dict['vars'].copy()
validation_vars.pop('metadata')
return validation_vars
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
validation_vars = self.dict['vars'].copy()
validation_vars.pop('metadata')
return validation_vars
@property
def get_data(self):
@ -244,9 +234,7 @@ class Validation:
"""Get the validation list of groups
:return: A list of groups for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -255,21 +243,14 @@ class Validation:
>>> print(val.groups)
['group1', 'group2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('groups', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('groups', [])
@property
def categories(self):
"""Get the validation list of categories
:return: A list of categories for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -278,21 +259,14 @@ class Validation:
>>> print(val.categories)
['category1', 'category2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('categories', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('categories', [])
@property
def products(self):
"""Get the validation list of products
:return: A list of products for the validation
:rtype: `list` or `None` if no metadata has been found
:raise: A `NameError` exception if no metadata has been found in the
playbook
:rtype: `list`
:Example:
@ -301,12 +275,7 @@ class Validation:
>>> print(val.products)
['product1', 'product2']
"""
if self.has_metadata_dict:
return self.dict['vars']['metadata'].get('products', [])
else:
raise NameError(
"No metadata found in validation {}".format(self.id)
)
return self.dict['vars']['metadata'].get('products', [])
@property
def get_id(self):
@ -342,8 +311,6 @@ class Validation:
the list of 'Categories', the list of `Groups`, the `ID` and
the `Name`.
:rtype: `dict`
:raise: A `NameError` exception if no metadata has been found in the
playbook
:Example:

View File

@ -12,19 +12,21 @@
# License for the specific language governing permissions and limitations
# under the License.
#
from validations_libs.logger import getLogger
import json
import os
import sys
import json
import yaml
from validations_libs.ansible import Ansible as v_ansible
from validations_libs.group import Group
from validations_libs.cli.common import Spinner
from validations_libs.validation_logs import ValidationLogs, ValidationLog
from validations_libs import constants
from validations_libs import utils as v_utils
from validations_libs.exceptions import ValidationRunException, ValidationShowException
from validations_libs.ansible import Ansible as v_ansible
from validations_libs.cli.common import Spinner
from validations_libs.exceptions import (ValidationRunException,
ValidationShowException)
from validations_libs.group import Group
from validations_libs.logger import getLogger
from validations_libs.validation_logs import ValidationLog, ValidationLogs
LOG = getLogger(__name__ + ".validation_actions")
@ -43,7 +45,7 @@ class ValidationActions:
"""
def __init__(self, validation_path=constants.ANSIBLE_VALIDATION_DIR,
def __init__(self, base_validation_path=constants.ANSIBLE_VALIDATION_DIR,
groups_path=constants.VALIDATION_GROUPS_INFO,
log_path=constants.VALIDATIONS_LOG_BASEDIR):
"""
@ -56,7 +58,7 @@ class ValidationActions:
:type log_path: ``string``
"""
self.log = getLogger(__name__ + ".ValidationActions")
self.validation_path = validation_path
self.base_validation_path = base_validation_path
self.log_path = log_path
self.groups_path = groups_path
@ -122,7 +124,7 @@ class ValidationActions:
self.log = getLogger(__name__ + ".list_validations")
validations = v_utils.parse_all_validations_on_disk(
path=self.validation_path,
path=self.base_validation_path,
groups=groups,
categories=categories,
products=products,
@ -182,7 +184,7 @@ class ValidationActions:
vlog = ValidationLogs(self.log_path)
data = v_utils.get_validations_data(
validation,
self.validation_path,
self.base_validation_path,
validation_config=validation_config)
if not data:
extra_msg = ""
@ -190,7 +192,7 @@ class ValidationActions:
extra_msg = " or {}".format(constants.COMMUNITY_LIBRARY_DIR)
msg = "Validation {} not found in the path: {}{}".format(
validation,
self.validation_path,
self.base_validation_path,
extra_msg)
raise ValidationShowException(msg)
@ -319,7 +321,7 @@ class ValidationActions:
extra_vars=None, validations_dir=None,
extra_env_vars=None, ansible_cfg=None, quiet=True,
limit_hosts=None, run_async=False,
base_dir=constants.DEFAULT_VALIDATIONS_BASEDIR,
base_dir=constants.DEFAULT_ANSIBLE_BASEDIR,
python_interpreter=None, skip_list=None,
callback_whitelist=None,
output_callback='vf_validation_stdout', ssh_user=None,
@ -418,7 +420,7 @@ class ValidationActions:
self.log = getLogger(__name__ + ".run_validations")
playbooks = []
validations_dir = (validations_dir if validations_dir
else self.validation_path)
else self.base_validation_path)
if group or category or product:
self.log.debug(
"Getting the validations list by:\n"
@ -590,7 +592,7 @@ class ValidationActions:
group_info = []
validations = v_utils.parse_all_validations_on_disk(
path=self.validation_path,
path=self.base_validation_path,
groups=[group[0] for group in group_definitions],
validation_config=validation_config)
@ -676,7 +678,7 @@ class ValidationActions:
raise ValidationShowException("{} output format not supported".format(output_format))
validation_playbooks = v_utils.get_validations_playbook(
path=self.validation_path,
path=self.base_validation_path,
validation_id=validations,
groups=groups,
categories=categories,
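A hedged usage sketch of the renamed constructor keyword on the actions API; the validation id is illustrative, and show_validations is assumed to accept it positionally the way the Show command calls it earlier in this commit.

from validations_libs import constants
from validations_libs.validation_actions import ValidationActions

actions = ValidationActions(
    base_validation_path=constants.ANSIBLE_VALIDATION_DIR,
    log_path=constants.VALIDATIONS_LOG_BASEDIR)
print(actions.show_validations('check-ram'))  # raises ValidationShowException if not found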