Schema validation mechanism

Adds a mechanism for verification of Validation Framework (VF) playbooks.
Checks that the parsed dictionary contains the required keys and that
their values have the expected types.

Should prevent invalid validations from being loaded at run time.

Tests included.
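
A rough sketch of what the check rejects (the playbook dictionary here is
made up for illustration; the function name and exception types are the
ones introduced below):

from validations_libs import utils

broken = {
    'hosts': 'undercloud',
    # required 'roles' key intentionally left out
    'vars': {'metadata': {'name': 'Example validation',
                          'description': 'foo',
                          'groups': ['prep']}}}
try:
    utils.verify_validation_schema(broken)
except KeyError:
    # The Ansible playbook check below wraps schema errors into a
    # RuntimeError, so an invalid validation is rejected before it runs.
    pass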

Signed-off-by: Jiri Podivin <jpodivin@redhat.com>
Change-Id: I61ced540dd58e153ac36037cfa8c1743cb38c44f
Jiri Podivin 2021-06-14 14:27:54 +02:00
parent 4d1df7b0e7
commit 8f5c05c9ee
7 changed files with 182 additions and 16 deletions

View File

@ -25,7 +25,7 @@ import threading
import yaml
from six.moves import configparser
from validations_libs import constants
from validations_libs import constants, utils
LOG = logging.getLogger(__name__ + ".ansible")
@ -52,11 +52,29 @@ class Ansible(object):
self.uuid = uuid
def _playbook_check(self, play, playbook_dir=None):
"""Check if playbook exist"""
"""Check if playbook exists and if it is valid according to
the schema specified in the `validations_utils.constants`
:param play: name of playbook file
:type play: `string`
:param playbook_dir: path to the playbook directory
:type playbook_dir: `PathLike`
:return: path to the validation playbook
:rtype: `string`
"""
if not os.path.exists(play):
play = os.path.join(playbook_dir, play)
if not os.path.exists(play):
raise RuntimeError('No such playbook: {}'.format(play))
try:
utils.verify_validation_schema(playbook=play)
except (ValueError, TypeError, KeyError) as err:
raise RuntimeError(
'{err}: Invalid validation file specified "{play}".'.format(
play=play,
err=err))
self.log.debug('Ansible playbook {} found'.format(play))
return play

View File

@ -27,3 +27,15 @@ VALIDATIONS_LOG_BASEDIR = os.path.join(os.environ.get('HOME'), 'validations')
VALIDATION_ANSIBLE_ARTIFACT_PATH = os.path.join(
VALIDATIONS_LOG_BASEDIR,
'artifacts')
VALIDATION_PLAYBOOK_SCHEMA = {
'hosts': str,
'vars': {
'metadata': {
'name': str,
'description': str,
'groups': list
}
},
'roles': list
}
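
Each nested dict in the schema drives a recursive check, and each leaf entry
names the type the parsed value must have. A minimal sketch (the metadata
values are made up) of how a mistyped field is caught, using a sub-schema as
the verification helper further down permits:

from validations_libs import constants, utils

bad_metadata = {'name': 'x', 'description': 'y', 'groups': 'prep'}
try:
    utils.verify_validation_schema(
        bad_metadata,
        schema=constants.VALIDATION_PLAYBOOK_SCHEMA['vars']['metadata'])
except TypeError:
    # 'groups' must be a list, not a string
    pass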

View File

@ -216,7 +216,7 @@ VALIDATIONS_DATA = {'Description': 'My Validation One Description',
VALIDATIONS_STATS = {'Last execution date': '2019-11-25 13:40:14',
'Number of execution': 'Total: 1, Passed: 0, Failed: 1'}
FAKE_WRONG_PLAYBOOK = [{
FAKE_WRONG_PLAYBOOK_KEY_ERR = [{
'hosts': 'undercloud',
'roles': ['advanced_format_512e_support'],
'vars': {
@ -228,6 +228,31 @@ FAKE_WRONG_PLAYBOOK = [{
}
}]
FAKE_WRONG_PLAYBOOK_TYPE_ERR = [{
'hosts': 'undercloud',
'roles': ['advanced_format_512e_support'],
'vars': {
'metadata': {
'description': 42,
'groups': ['prep', 'pre-deployment'],
'name': 'Advanced Format 512e Support'
}
}
}]
FAKE_WRONG_PLAYBOOK_VALUE_ERR = [{
'hosts': 'undercloud',
'roles': ['advanced_format_512e_support'],
'fizz': 'buzzz',
'vars': {
'metadata': {
'description': 'foo',
'groups': ['prep', 'pre-deployment'],
'name': 'Advanced Format 512e Support'
}
}
}]
FAKE_PLAYBOOK = [{'hosts': 'undercloud',
'roles': ['advanced_format_512e_support'],
'vars': {'metadata': {'description': 'foo',

View File

@ -69,10 +69,12 @@ class TestAnsible(TestCase):
self.assertEqual(fake_uuid, ansible.uuid)
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('os.path.exists', return_value=False)
@mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
return_value="/foo/inventory.yaml")
def test_check_no_playbook(self, mock_dump_artifact, mock_exists):
def test_check_no_playbook(self, mock_dump_artifact,
mock_exists, mock_schema_ver):
"""
Checks if providing nonexistent playbook raises RuntimeError.
Checks if os.path.exists is called both with name of the play file
@ -197,6 +199,7 @@ class TestAnsible(TestCase):
mock_open.assert_called_once_with('/foo/bar')
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('six.moves.builtins.open')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@ -211,7 +214,7 @@ class TestAnsible(TestCase):
@mock.patch('ansible_runner.runner_config.RunnerConfig')
def test_ansible_runner_error(self, mock_config, mock_dump_artifact,
mock_run, mock_mkdirs, mock_exists,
mock_open):
mock_open, mock_schema_ver):
_playbook, _rc, _status = self.run.run('existing.yaml',
'localhost,',
@ -219,6 +222,7 @@ class TestAnsible(TestCase):
self.assertEqual((_playbook, _rc, _status),
('existing.yaml', 1, 'failed'))
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('six.moves.builtins.open')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@ -229,7 +233,8 @@ class TestAnsible(TestCase):
@mock.patch('ansible_runner.runner_config.RunnerConfig')
def test_run_success_default(self, mock_config, mock_dump_artifact,
mock_run, mock_mkdirs, mock_exists,
mock_open):
mock_open, mock_schema_ver):
_playbook, _rc, _status = self.run.run(
playbook='existing.yaml',
inventory='localhost,',
@ -238,6 +243,7 @@ class TestAnsible(TestCase):
self.assertEqual((_playbook, _rc, _status),
('existing.yaml', 0, 'successful'))
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('six.moves.builtins.open')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@ -249,7 +255,8 @@ class TestAnsible(TestCase):
def test_run_success_gathering_policy(self, mock_config,
mock_dump_artifact, mock_run,
mock_mkdirs, mock_exists,
mock_open):
mock_open, mock_schema_ver):
_playbook, _rc, _status = self.run.run(
playbook='existing.yaml',
inventory='localhost,',
@ -260,6 +267,7 @@ class TestAnsible(TestCase):
self.assertEqual((_playbook, _rc, _status),
('existing.yaml', 0, 'successful'))
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@mock.patch.object(Runner, 'run',
@ -270,8 +278,9 @@ class TestAnsible(TestCase):
@mock.patch('ansible_runner.runner_config.RunnerConfig')
def test_run_success_local(self, mock_config, mock_open,
mock_dump_artifact, mock_run,
mock_mkdirs, mock_exists
):
mock_mkdirs, mock_exists,
mock_schema_ver):
_playbook, _rc, _status = self.run.run(
playbook='existing.yaml',
inventory='localhost,',
@ -281,6 +290,7 @@ class TestAnsible(TestCase):
self.assertEqual((_playbook, _rc, _status),
('existing.yaml', 0, 'successful'))
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@mock.patch.object(Runner, 'run',
@ -291,8 +301,9 @@ class TestAnsible(TestCase):
@mock.patch('ansible_runner.runner_config.RunnerConfig')
def test_run_success_run_async(self, mock_config, mock_open,
mock_dump_artifact, mock_run,
mock_mkdirs, mock_exists
):
mock_mkdirs, mock_exists,
mock_schema_ver):
_playbook, _rc, _status = self.run.run(
playbook='existing.yaml',
inventory='localhost,',
@ -303,6 +314,7 @@ class TestAnsible(TestCase):
self.assertEqual((_playbook, _rc, _status),
('existing.yaml', None, 'unstarted'))
@mock.patch('validations_libs.utils.verify_validation_schema')
@mock.patch('six.moves.builtins.open')
@mock.patch('os.path.exists', return_value=True)
@mock.patch('os.makedirs')
@ -317,7 +329,8 @@ class TestAnsible(TestCase):
@mock.patch('os.path.abspath', return_value='/tmp/foo/localhost')
def test_run_specific_log_path(self, moch_path, mock_env, mock_env_var,
mock_config, mock_dump_artifact, mock_run,
mock_mkdirs, mock_exists, mock_open):
mock_mkdirs, mock_exists, mock_open,
mock_schema_ver):
_playbook, _rc, _status = self.run.run(
playbook='existing.yaml',
inventory='localhost,',

View File

@ -308,3 +308,52 @@ class TestUtils(TestCase):
"""Test if failure to create artifacts dir raises 'RuntimeError'.
"""
self.assertRaises(RuntimeError, utils.create_artifacts_dir, "/foo/bar")
@mock.patch(
'validations_libs.utils._load_playbook',
return_value=fakes.FAKE_PLAYBOOK)
def test_verify_validations_schema_success(self, mock_load_playbook):
self.assertEqual(True, utils.verify_validation_schema('/foo/bar'))
mock_load_playbook.assert_called_once_with('/foo/bar')
@mock.patch(
'validations_libs.utils._load_playbook',
return_value=fakes.FAKE_PLAYBOOK)
def test_verify_validations_schema_success_strict(self, mock_load_playbook):
self.assertEqual(True, utils.verify_validation_schema('/foo/bar', strict=True))
mock_load_playbook.assert_called_once_with('/foo/bar')
@mock.patch(
'validations_libs.utils._load_playbook',
return_value=fakes.FAKE_WRONG_PLAYBOOK_TYPE_ERR)
def test_verify_validations_schema_fail_type_error(self, mock_load_playbook):
self.assertRaises(
TypeError,
utils.verify_validation_schema,
'/foo/bar')
@mock.patch(
'validations_libs.utils._load_playbook',
return_value=fakes.FAKE_WRONG_PLAYBOOK_VALUE_ERR)
def test_verify_validations_schema_fail_value_error(self, mock_load_playbook):
self.assertRaises(
ValueError,
utils.verify_validation_schema,
'/foo/bar',
strict=True)
mock_load_playbook.assert_called_once_with('/foo/bar')
@mock.patch(
'validations_libs.utils._load_playbook',
return_value=fakes.FAKE_WRONG_PLAYBOOK_KEY_ERR)
def test_verify_validations_schema_fail_key_error(self, mock_load_playbook):
self.assertRaises(
KeyError,
utils.verify_validation_schema,
'/foo/bar')
mock_load_playbook.assert_called_once_with('/foo/bar')

View File

@ -42,7 +42,7 @@ class TestValidation(TestCase):
data = val.get_metadata
self.assertEqual(data, fakes.FAKE_METADATA)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK_KEY_ERR)
@mock.patch('six.moves.builtins.open')
def test_get_metadata_wrong_playbook(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
@ -64,7 +64,7 @@ class TestValidation(TestCase):
data = val.get_vars
self.assertEqual(data, {})
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK_KEY_ERR)
@mock.patch('six.moves.builtins.open')
def test_get_vars_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
@ -88,7 +88,7 @@ class TestValidation(TestCase):
groups = val.groups
self.assertEqual(groups, ['prep', 'pre-deployment'])
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK_KEY_ERR)
@mock.patch('six.moves.builtins.open')
def test_groups_with_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:
@ -117,7 +117,7 @@ class TestValidation(TestCase):
data = val.get_formated_data
self.assertEqual(data, fakes.FORMATED_DATA)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK)
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_WRONG_PLAYBOOK_KEY_ERR)
@mock.patch('six.moves.builtins.open')
def test_get_formated_data_no_metadata(self, mock_open, mock_yaml):
with self.assertRaises(NameError) as exc_mgr:

View File

@ -18,6 +18,7 @@ import logging
import os
import six
import uuid
import yaml
from os.path import join
from validations_libs import constants
@ -410,3 +411,51 @@ def convert_data(data=''):
raise TypeError("The input data should be either a List or a String")
else:
return data
def _load_playbook(playbook_path):
with open(playbook_path, 'r') as p_file:
playbook = yaml.safe_load(p_file.read())
return playbook
def verify_validation_schema(playbook,
schema=constants.VALIDATION_PLAYBOOK_SCHEMA,
strict=False):
"""Verify that the supplied playbook is a proper validation.
Checks for the presence of the metadata and roles keys,
along with their types.
:param playbook: path to a playbook file, or a dictionary
from an already parsed playbook, to be verified.
:type playbook: `PathLike` or `dict`
:param schema: Schema against which to verify playbook, or a subset of one
:type schema: `dict`
:param strict: Should the verification fail on presence of an unknown key
:type strict: `bool`
:return: `True` if the playbook conforms to the schema
:rtype: `bool`
"""
if type(playbook) is str:
playbook = _load_playbook(playbook)[0]
for key in schema:
if key not in playbook:
raise KeyError()
if type(playbook[key]) is dict:
verify_validation_schema(playbook[key], schema[key])
elif type(playbook[key]) != schema[key]:
raise TypeError()
if strict:
if len(playbook) != len(schema):
raise ValueError()
for key in playbook:
if key not in schema:
LOG.warning((
"Key {key} is not in the defined schema."
).format(key=key))
raise KeyError()
return True
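
A short usage sketch (not part of the change) summarizing the behaviour
exercised by the unit tests above; the playbook dictionary mirrors
FAKE_PLAYBOOK:

from validations_libs import utils

valid = {
    'hosts': 'undercloud',
    'roles': ['advanced_format_512e_support'],
    'vars': {'metadata': {'name': 'Advanced Format 512e Support',
                          'description': 'foo',
                          'groups': ['prep', 'pre-deployment']}}}

# A playbook matching the schema passes in both default and strict mode.
assert utils.verify_validation_schema(valid)
assert utils.verify_validation_schema(valid, strict=True)

# With strict=True an unknown top-level key ('fizz') is rejected.
extra = dict(valid, fizz='buzz')
try:
    utils.verify_validation_schema(extra, strict=True)
except ValueError:
    pass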