Fix parameter management
This patch fixes multiple things: * from validations_libs.validation_actions import show_validations_parameters The generated file contained the validation names and their existing parameters as returned by the get_validations_parameters method, but the file should only contain the parameters. * from validations_libs.utils import get_validations_playbook This method was supposed to get a list of validation playbooks' absolute paths either by their names or by their groups. It worked only when given validation names, not groups. This patch fixes all the problems above and makes parameter management work again. Change-Id: Iec30cccade1234a127ef406ed13604cce45a72fd Signed-off-by: Gael Chamoulaud (Strider) <gchamoul@redhat.com>
This commit is contained in:
parent
96cf2f27c2
commit
d79245eb82
|
@ -13,8 +13,6 @@
|
|||
# under the License.
|
||||
#
|
||||
|
||||
import json
|
||||
import yaml
|
||||
try:
|
||||
from unittest import mock
|
||||
except ImportError:
|
||||
|
@ -84,7 +82,8 @@ class TestUtils(TestCase):
|
|||
mock_listdir.return_value = ['foo.yaml']
|
||||
mock_isfile.return_value = True
|
||||
result = utils.get_validations_playbook('/foo/playbook', 'foo', 'prep')
|
||||
self.assertEqual(result, ['/foo/playbook/foo.yaml'])
|
||||
self.assertEqual(result, ['/foo/playbook/foo.yaml',
|
||||
'/foo/playbook/foo.yaml'])
|
||||
|
||||
@mock.patch('os.path.isfile')
|
||||
@mock.patch('os.listdir')
|
||||
|
@ -96,7 +95,7 @@ class TestUtils(TestCase):
|
|||
mock_isfile):
|
||||
mock_listdir.return_value = ['foo.yaml']
|
||||
mock_isfile.return_value = True
|
||||
result = utils.get_validations_playbook('/foo/playbook', 'foo',
|
||||
result = utils.get_validations_playbook('/foo/playbook',
|
||||
'no_group')
|
||||
self.assertEqual(result, [])
|
||||
|
||||
|
@ -139,7 +138,7 @@ class TestUtils(TestCase):
|
|||
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
|
||||
'foo')
|
||||
output = {'foo': {'parameters': {'foo': 'bar'}}}
|
||||
self.assertEqual(result, json.dumps(output, indent=4, sort_keys=True))
|
||||
self.assertEqual(result, output)
|
||||
|
||||
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
|
||||
@mock.patch('six.moves.builtins.open')
|
||||
|
@ -150,10 +149,7 @@ class TestUtils(TestCase):
|
|||
validation_name='foo',
|
||||
format='yaml')
|
||||
output = {'foo': {'parameters': {'foo': 'bar'}}}
|
||||
self.assertEqual(result, yaml.safe_dump(output,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False,
|
||||
indent=2))
|
||||
self.assertEqual(result, output)
|
||||
|
||||
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK2)
|
||||
@mock.patch('six.moves.builtins.open')
|
||||
|
@ -171,7 +167,7 @@ class TestUtils(TestCase):
|
|||
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
|
||||
[], ['prep'])
|
||||
output = {'foo': {'parameters': {'foo': 'bar'}}}
|
||||
self.assertEqual(result, json.dumps(output, indent=4, sort_keys=True))
|
||||
self.assertEqual(result, output)
|
||||
|
||||
@mock.patch('yaml.safe_load', return_value=fakes.FAKE_PLAYBOOK)
|
||||
@mock.patch('six.moves.builtins.open')
|
||||
|
@ -179,4 +175,4 @@ class TestUtils(TestCase):
|
|||
|
||||
result = utils.get_validations_parameters(['/foo/playbook/foo.yaml'],
|
||||
[], [])
|
||||
self.assertEqual(result, json.dumps({}))
|
||||
self.assertEqual(result, {})
|
||||
|
|
|
@ -17,6 +17,9 @@ try:
|
|||
from unittest import mock
|
||||
except ImportError:
|
||||
import mock
|
||||
|
||||
import json
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
from validations_libs.tests import fakes
|
||||
|
@ -193,7 +196,8 @@ class TestValidationActions(TestCase):
|
|||
{'parameters': fakes.FAKE_METADATA}}
|
||||
v_actions = ValidationActions()
|
||||
result = v_actions.show_validations_parameters('foo')
|
||||
self.assertEqual(result, {'foo': {'parameters': fakes.FAKE_METADATA}})
|
||||
self.assertEqual(result, json.dumps(mock_get_param.return_value,
|
||||
indent=4, sort_keys=True))
|
||||
|
||||
@mock.patch('validations_libs.validation_logs.ValidationLogs.'
|
||||
'get_logfile_by_validation',
|
||||
|
|
|
@ -14,12 +14,10 @@
|
|||
#
|
||||
import datetime
|
||||
import glob
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import uuid
|
||||
import yaml
|
||||
|
||||
from os.path import join
|
||||
from validations_libs import constants
|
||||
|
@ -66,10 +64,30 @@ def parse_all_validations_on_disk(path, groups=None):
|
|||
return results
|
||||
|
||||
|
||||
def get_validations_playbook(path, validation_id, groups=None):
|
||||
def get_validations_playbook(path, validation_id=None, groups=None):
|
||||
"""
|
||||
Return a list of validations playbook
|
||||
Can be sorted by Groups
|
||||
Get a list of validations playbooks paths either by their names
|
||||
or their groups
|
||||
|
||||
:param path: Path of the validations playbooks
|
||||
:type path: `string`
|
||||
|
||||
:param validation_id: List of validation name
|
||||
:type validation_id: `list`
|
||||
|
||||
:param groups: List of validation group
|
||||
:type groups: `list`
|
||||
|
||||
:return: A list of absolute validations playbooks path
|
||||
|
||||
:exemple:
|
||||
|
||||
>>> path = '/usr/share/validation-playbooks'
|
||||
>>> validation_id = ['512e','check-cpu']
|
||||
>>> groups = None
|
||||
>>> get_validations_playbook(path, validation_id, groups)
|
||||
['/usr/share/ansible/validation-playbooks/512e.yaml',
|
||||
'/usr/share/ansible/validation-playbooks/check-cpu.yaml',]
|
||||
"""
|
||||
if isinstance(groups, six.string_types):
|
||||
groups = [groups]
|
||||
|
@ -77,9 +95,12 @@ def get_validations_playbook(path, validation_id, groups=None):
|
|||
for f in os.listdir(path):
|
||||
pl_path = join(path, f)
|
||||
if os.path.isfile(pl_path):
|
||||
if os.path.splitext(f)[0] in validation_id:
|
||||
if validation_id:
|
||||
if os.path.splitext(f)[0] in validation_id:
|
||||
pl.append(pl_path)
|
||||
if groups:
|
||||
val = Validation(pl_path)
|
||||
if not groups or set(groups).intersection(val.groups):
|
||||
if set(groups).intersection(val.groups):
|
||||
pl.append(pl_path)
|
||||
return pl
|
||||
|
||||
|
@ -142,8 +163,5 @@ def get_validations_parameters(validations_data, validation_name=[],
|
|||
if format not in ['json', 'yaml']:
|
||||
msg = 'Invalid output format, {} is not supported'.format(format)
|
||||
raise RuntimeError(msg)
|
||||
elif format == 'json':
|
||||
return json.dumps(params, indent=4, sort_keys=True)
|
||||
else:
|
||||
return yaml.safe_dump(params, allow_unicode=True,
|
||||
default_flow_style=False, indent=2)
|
||||
|
||||
return params
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
#
|
||||
import logging
|
||||
import os
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from validations_libs.ansible import Ansible as v_ansible
|
||||
from validations_libs.group import Group
|
||||
|
@ -152,9 +154,31 @@ class ValidationActions(object):
|
|||
params = v_utils.get_validations_parameters(validations, validation,
|
||||
group, format)
|
||||
if download_file:
|
||||
params_only = {}
|
||||
ext = os.splitext(download_file)[1][1:]
|
||||
with open(download_file, 'w') as f:
|
||||
f.write(params)
|
||||
return params
|
||||
for val_name in params.keys():
|
||||
for k, v in params[val_name].get('parameters').items():
|
||||
params_only[k] = v
|
||||
|
||||
if ext == 'json':
|
||||
f.write(json.dumps(params_only,
|
||||
indent=4,
|
||||
sort_keys=True))
|
||||
else:
|
||||
f.write(yaml.safe_dump(params_only,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False,
|
||||
indent=2))
|
||||
if format == 'json':
|
||||
return json.dumps(params,
|
||||
indent=4,
|
||||
sort_keys=True)
|
||||
else:
|
||||
return yaml.safe_dump(params,
|
||||
allow_unicode=True,
|
||||
default_flow_style=False,
|
||||
indent=2)
|
||||
|
||||
def show_history(self, validation_id=None, extension='json',
|
||||
log_path=constants.VALIDATIONS_LOG_BASEDIR):
|
||||
|
|
Loading…
Reference in New Issue