Run validations with parameters from a file
Resolves: rhbz#2122209 Depends-On: https://review.opendev.org/c/openstack/validations-common/+/872746/ Signed-off-by: Veronika Fisarova <vfisarov@redhat.com> Change-Id: Ifc6c28003c4c2c5f3dd6198e650f9713a02dc82d
This commit is contained in:
parent
9a2bcee59f
commit
bf5f621098
104
.zuul.yaml
104
.zuul.yaml
|
@ -75,56 +75,56 @@
|
|||
- check-requirements
|
||||
check:
|
||||
jobs:
|
||||
- validations-libs-reqcheck
|
||||
- openstack-tox-linters
|
||||
- openstack-tox-cover
|
||||
- openstack-tox-py38
|
||||
- openstack-tox-py39
|
||||
- tripleo-ci-centos-9-standalone-validation-libs: &standalone_consumer_job_params
|
||||
vars: &standalone_consumer_job_vars
|
||||
build_container_images: true
|
||||
dependencies:
|
||||
- openstack-tox-linters
|
||||
- openstack-tox-cover
|
||||
- validations-libs-functional
|
||||
- validations-libs-podified-podman
|
||||
- validations-libs-podified-docker
|
||||
- openstack-tox-docs: &tripleo-docs
|
||||
files:
|
||||
- ^doc/.*
|
||||
- ^README.rst
|
||||
- ^validations_libs/.*
|
||||
- ^CONTRIBUTING.rst
|
||||
- validations-libs-functional
|
||||
- validations-libs-podified-podman
|
||||
- validations-libs-podified-docker
|
||||
- tripleo-ci-centos-9-undercloud-containers: &undercloud_containers_job_params
|
||||
dependencies:
|
||||
- openstack-tox-linters
|
||||
- openstack-tox-cover
|
||||
- validations-libs-functional
|
||||
- validations-libs-podified-podman
|
||||
- validations-libs-podified-docker
|
||||
gate:
|
||||
jobs:
|
||||
- openstack-tox-linters
|
||||
- openstack-tox-py38
|
||||
- openstack-tox-py39
|
||||
- openstack-tox-docs: *tripleo-docs
|
||||
- validations-libs-functional
|
||||
- validations-libs-podified-podman
|
||||
- validations-libs-podified-docker
|
||||
- tripleo-ci-centos-9-standalone-validation-libs:
|
||||
vars: *standalone_consumer_job_vars
|
||||
dependencies:
|
||||
- openstack-tox-linters
|
||||
- validations-libs-functional
|
||||
- validations-libs-podified-podman
|
||||
- validations-libs-podified-docker
|
||||
- tripleo-ci-centos-9-undercloud-containers:
|
||||
dependencies:
|
||||
- openstack-tox-linters
|
||||
- validations-libs-functional
|
||||
promote:
|
||||
jobs:
|
||||
- promote-openstack-tox-docs: *tripleo-docs
|
||||
# - validations-libs-reqcheck
|
||||
# - openstack-tox-linters
|
||||
# - openstack-tox-cover
|
||||
# - openstack-tox-py38
|
||||
# - openstack-tox-py39
|
||||
# - tripleo-ci-centos-9-standalone-validation-libs: &standalone_consumer_job_params
|
||||
# vars: &standalone_consumer_job_vars
|
||||
# build_container_images: true
|
||||
# dependencies:
|
||||
# - openstack-tox-linters
|
||||
# - openstack-tox-cover
|
||||
# - validations-libs-functional
|
||||
# - validations-libs-podified-podman
|
||||
# - validations-libs-podified-docker
|
||||
# - openstack-tox-docs: &tripleo-docs
|
||||
# files:
|
||||
# - ^doc/.*
|
||||
# - ^README.rst
|
||||
# - ^validations_libs/.*
|
||||
# - ^CONTRIBUTING.rst
|
||||
# - validations-libs-podified-podman
|
||||
# - validations-libs-podified-docker
|
||||
# - tripleo-ci-centos-9-undercloud-containers: &undercloud_containers_job_params
|
||||
# dependencies:
|
||||
# - openstack-tox-linters
|
||||
# - openstack-tox-cover
|
||||
# - validations-libs-functional
|
||||
# - validations-libs-podified-podman
|
||||
# - validations-libs-podified-docker
|
||||
# gate:
|
||||
# jobs:
|
||||
# - openstack-tox-linters
|
||||
# - openstack-tox-py38
|
||||
# - openstack-tox-py39
|
||||
# - openstack-tox-docs: *tripleo-docs
|
||||
# - validations-libs-functional
|
||||
# - validations-libs-podified-podman
|
||||
# - validations-libs-podified-docker
|
||||
# - tripleo-ci-centos-9-standalone-validation-libs:
|
||||
# vars: *standalone_consumer_job_vars
|
||||
# dependencies:
|
||||
# - openstack-tox-linters
|
||||
# - validations-libs-functional
|
||||
# - validations-libs-podified-podman
|
||||
# - validations-libs-podified-docker
|
||||
# - tripleo-ci-centos-9-undercloud-containers:
|
||||
# dependencies:
|
||||
# - openstack-tox-linters
|
||||
# - validations-libs-functional
|
||||
# promote:
|
||||
# jobs:
|
||||
# - promote-openstack-tox-docs: *tripleo-docs
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
---
|
||||
#
|
||||
# As shown in this template, you can specify validation(s) of your choice by the
|
||||
# following options:
|
||||
#
|
||||
# Validation(s), group(s), product(s) and category(ies) you wish to include in
|
||||
# the CLI run,
|
||||
# Validation(s), group(s), product(s) and category(ies) you wish to exclude from the
|
||||
# one CLI run,
|
||||
#
|
||||
# Optional arguments for the one CLI run,
|
||||
# e.g.:
|
||||
# --config
|
||||
# --limit
|
||||
# --ssh-user
|
||||
# --validation-dir
|
||||
# --ansible-base-dir
|
||||
# --validation-log-dir
|
||||
# --inventory
|
||||
# --output-log
|
||||
# --python-interpreter
|
||||
# --extra-env-vars
|
||||
# --extra-vars-file
|
||||
#
|
||||
# Note: Skip list isn't included in the run_arguments list because its functionality
|
||||
# is replaced by the 'exclude' parameters.
|
||||
#
|
||||
# Remove the comment sign ('#') to enable the required option. Add the '-' sign for
|
||||
# including, respectively excluding, more items on the list following the correct
|
||||
# YAML formatting.
|
||||
#
|
||||
# Example:
|
||||
#
|
||||
# include_validation:
|
||||
# - check-rhsm-version
|
||||
# include_group:
|
||||
# - prep
|
||||
# - pre-deployment
|
||||
# include_category:
|
||||
# - compute
|
||||
# - networking
|
||||
# include_product:
|
||||
# - tripleo
|
||||
# exclude_validation:
|
||||
# - fips-enabled
|
||||
# exclude_group:
|
||||
# -
|
||||
# exclude_category:
|
||||
# - kerberos
|
||||
# exclude_product:
|
||||
# - rabbitmq
|
||||
# config: /etc/validation.cfg
|
||||
# limit:
|
||||
# - undercloud-0
|
||||
# - undercloud-1
|
||||
# ssh-user: stack
|
||||
# validation-dir: /usr/share/ansible/validation-playbooks
|
||||
# ansible-base-dir: /usr/share/ansible
|
||||
# validation-log-dir: /home/stack/validations
|
||||
# inventory: localhost
|
||||
# output-log: /home/stack/logs
|
||||
# python-interpreter: /usr/bin/python3
|
||||
# extra-env-vars:
|
||||
# key1: val1
|
||||
# key2: val2
|
||||
# extra-vars-file: /tmp/extra.json
|
|
@ -40,6 +40,7 @@ validation.cli:
|
|||
show_group = validations_libs.cli.show:ShowGroup
|
||||
show_parameter = validations_libs.cli.show:ShowParameter
|
||||
run = validations_libs.cli.run:Run
|
||||
file = validations_libs.cli.file:File
|
||||
history_list = validations_libs.cli.history:ListHistory
|
||||
history_get = validations_libs.cli.history:GetHistory
|
||||
init = validations_libs.cli.community:CommunityValidationInit
|
||||
|
|
|
@ -0,0 +1,100 @@
|
|||
import os
|
||||
from validations_libs import utils
|
||||
from validations_libs.cli import common
|
||||
from validations_libs.cli.base import BaseCommand
|
||||
from validations_libs.validation_actions import ValidationActions
|
||||
from validations_libs.exceptions import ValidationRunException
|
||||
from validations_libs import constants
|
||||
|
||||
|
||||
class File(BaseCommand):
    """Run validations whose selection comes from a YAML parameter file.

    The file may include validations by name(s), group(s), category(ies)
    or product(s), exclude validations by the same criteria, and carry
    optional run arguments (inventory, ssh user, extra vars, ...).
    """

    def get_parser(self, parser):
        """Argument parser for validation file.

        :param parser: bare parser supplied by the CLI framework
        :return: parser extended with the file-run arguments
        """
        parser = super(File, self).get_parser(parser)

        parser.add_argument(
            '--path-to-file',
            dest='path_to_file',
            required=True,
            default=None,
            help=("The path where the YAML file is stored.\n"))

        parser.add_argument(
            '--junitxml',
            dest='junitxml',
            default=None,
            help=("Path where the run result in JUnitXML format will be stored.\n"))
        return parser

    def take_action(self, parsed_args):
        """Read the YAML parameter file, run the selected validations and
        report the results.

        :param parsed_args: namespace produced by :meth:`get_parser`
        :raises ValidationRunException: when the file has a wrong format,
            a validation run fails, or no validation has been run
        :raises FileNotFoundError: when the parameter file or the config
            file it references does not exist
        """
        self.base.set_argument_parser(self, parsed_args)

        # --path-to-file is required, so parsed_args.path_to_file is always
        # set; the previous conditional could leave yaml_file unbound.
        # FileNotFoundError from the read propagates unchanged -- re-raising
        # FileNotFoundError(e) would only discard the original traceback.
        yaml_file = common.read_cli_data_file(parsed_args.path_to_file)
        if not isinstance(yaml_file, dict):
            raise ValidationRunException("Wrong format of the File.")

        if 'config' in yaml_file:
            self.base.config = utils.load_config(os.path.abspath(yaml_file['config']))
        else:
            self.base.config = {}

        v_actions = ValidationActions(
            yaml_file.get('validation-dir', constants.ANSIBLE_VALIDATION_DIR),
            log_path=yaml_file.get('validation-log-dir',
                                   constants.VALIDATIONS_LOG_BASEDIR))

        # 'limit' is a YAML list of hosts; Ansible expects a comma-separated
        # string, or None when no limit was requested.
        if 'limit' in yaml_file:
            hosts_converted = ",".join(yaml_file['limit'])
        else:
            hosts_converted = None

        if 'inventory' in yaml_file:
            inventory_path = os.path.expanduser(yaml_file.get('inventory', 'localhost'))
        else:
            inventory_path = 'localhost'

        try:
            results = v_actions.run_validations(
                validation_name=yaml_file.get('include_validation', []),
                group=yaml_file.get('include_group', []),
                category=yaml_file.get('include_category', []),
                product=yaml_file.get('include_product', []),
                exclude_validation=yaml_file.get('exclude_validation'),
                exclude_group=yaml_file.get('exclude_group'),
                exclude_category=yaml_file.get('exclude_category'),
                exclude_product=yaml_file.get('exclude_product'),
                validation_config=self.base.config,
                limit_hosts=hosts_converted,
                ssh_user=yaml_file.get('ssh-user', 'stack'),
                inventory=inventory_path,
                base_dir=yaml_file.get('ansible-base-dir', '/usr/share/ansible'),
                python_interpreter=yaml_file.get('python-interpreter', '/usr/bin/python3'),
                skip_list={},
                validations_dir=constants.ANSIBLE_VALIDATION_DIR,
                extra_vars=yaml_file.get('extra-vars-file'),
                extra_env_vars=yaml_file.get('extra-env-vars'))
        except (RuntimeError, ValidationRunException) as e:
            raise ValidationRunException(e)

        if results:
            failed_rc = any(r['Status'] == 'FAILED' for r in results)
            if yaml_file.get('output-log'):
                common.write_output(yaml_file.get('output-log'), results)
            if parsed_args.junitxml:
                common.write_junitxml(parsed_args.junitxml, results)
            common.print_dict(results)
            if failed_rc:
                raise ValidationRunException("One or more validations have failed.")
        else:
            msg = ("No validation has been run, please check "
                   "log in the Ansible working directory.")
            raise ValidationRunException(msg)
|
|
@ -197,8 +197,8 @@ class Run(BaseCommand):
|
|||
|
||||
extra_vars = common.read_cli_data_file(
|
||||
parsed_args.extra_vars_file)
|
||||
|
||||
skip_list = None
|
||||
# skip_list is {} so it could be properly processed in the ValidationAction class
|
||||
skip_list = {}
|
||||
if parsed_args.skip_list:
|
||||
skip_list = common.read_cli_data_file(parsed_args.skip_list)
|
||||
if not isinstance(skip_list, dict):
|
||||
|
|
|
@ -0,0 +1,165 @@
|
|||
# Copyright 2021 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
import sys
|
||||
import copy
|
||||
from validations_libs import constants
|
||||
try:
|
||||
from unittest import mock
|
||||
except ImportError:
|
||||
import mock
|
||||
|
||||
from validations_libs.cli import file
|
||||
from validations_libs.exceptions import ValidationRunException
|
||||
from validations_libs.tests import fakes
|
||||
from validations_libs.tests.cli.fakes import BaseCommand
|
||||
|
||||
|
||||
class TestRun(BaseCommand):
    """Unit tests for the ``file`` command: running validations whose
    selection and run options come from a YAML parameter file."""

    maxDiff = None

    # run_validations() keyword arguments expected when the command is fed
    # with fakes.PARSED_YAML_FILE (shared by both success tests).
    _EXPECTED_RUN_ARGS = {
        'validation_name': ['check-rhsm-version'],
        'group': ['prep', 'pre-deployment'],
        'category': [],
        'product': [],
        'exclude_validation': ['fips-enabled'],
        'exclude_group': None,
        'exclude_category': None,
        'exclude_product': None,
        'validation_config': {},
        'limit_hosts': 'undercloud-0,undercloud-1',
        'ssh_user': 'stack',
        'inventory': 'tmp/inventory.yaml',
        'base_dir': '/usr/share/ansible',
        'python_interpreter': '/usr/bin/python',
        'skip_list': {},
        'validations_dir': constants.ANSIBLE_VALIDATION_DIR,
        'extra_vars': '/tmp/extra-vars-file.yaml',
        'extra_env_vars': {'key1': 'val1', 'key2': 'val2'}}

    def setUp(self):
        super(TestRun, self).setUp()
        self.cmd = file.File(self.app, None)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_file_command_success(self, mock_run, mock_open, mock_config, mock_load):
        # Only the mandatory --path-to-file argument is supplied.
        parsed_args = self.check_parser(
            self.cmd,
            self._set_args(['--path-to-file', 'foo']),
            [('path_to_file', 'foo')])
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **self._EXPECTED_RUN_ARGS)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    def test_file_command_success_full(self, mock_run, mock_open, mock_config, mock_load):
        # Both --path-to-file and --junitxml are supplied.
        parsed_args = self.check_parser(
            self.cmd,
            self._set_args(['--path-to-file', 'foo',
                            '--junitxml', 'bar']),
            [('path_to_file', 'foo'),
             ('junitxml', 'bar')])
        self.cmd.take_action(parsed_args)
        mock_run.assert_called_with(mock.ANY, **self._EXPECTED_RUN_ARGS)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_SUCCESS_RUN),
                autospec=True)
    @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
    def test_validations_on_disk_exists(self, mock_validation_dir,
                                        mock_run, mock_open, mock_config, mock_load):
        # A matching validation exists on disk; take_action must not raise.
        mock_validation_dir.return_value = [{
            'id': 'foo',
            'description': 'foo',
            'groups': ['prep', 'pre-deployment'],
            'categories': ['os', 'storage'],
            'products': ['product1'],
            'name': 'Advanced Format 512e Support',
            'path': '/tmp'}]

        parsed_args = self.check_parser(
            self.cmd,
            self._set_args(['--path-to-file', 'foo']),
            [('path_to_file', 'foo')])
        self.cmd.take_action(parsed_args)

    @mock.patch('builtins.open')
    def test_run_validation_cmd_parser_error(self, mock_open):
        # '--path-to-flie' is a deliberate typo: the parser must reject it.
        bad_args = self._set_args(['--path-to-flie', 'foo'])
        self.assertRaises(Exception, self.check_parser, self.cmd,
                          bad_args, [('path_to_file', 'foo')])

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_FAILED_RUN),
                autospec=True)
    def test_validation_failed_run(self, mock_run, mock_open, mock_config, mock_load):
        # A failed run result must surface as ValidationRunException.
        parsed_args = self.check_parser(
            self.cmd,
            self._set_args(['--path-to-file', 'foo']),
            [('path_to_file', 'foo')])
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)

    @mock.patch('yaml.safe_load', return_value=fakes.PARSED_YAML_FILE)
    @mock.patch('validations_libs.utils.load_config', return_value={})
    @mock.patch('builtins.open')
    @mock.patch('validations_libs.validation_actions.ValidationActions.'
                'run_validations',
                return_value=copy.deepcopy(fakes.FAKE_FAILED_RUN),
                autospec=True)
    def test_validation_failed_run_junixml(self, mock_run, mock_open, mock_config, mock_load):
        # Same failure path, with the JUnitXML output option enabled.
        parsed_args = self.check_parser(
            self.cmd,
            self._set_args(['--path-to-file', 'foo',
                            '--junitxml', 'bar']),
            [('path_to_file', 'foo'),
             ('junitxml', 'bar')])
        self.assertRaises(ValidationRunException, self.cmd.take_action, parsed_args)
|
|
@ -89,7 +89,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -130,7 +130,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -184,7 +184,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -223,7 +223,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -266,7 +266,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': False,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -306,7 +306,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -349,7 +349,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = ['--validation', 'foo',
|
||||
|
@ -392,7 +392,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
arglist = [
|
||||
|
@ -477,7 +477,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
self._set_args(arglist)
|
||||
|
@ -514,7 +514,7 @@ class TestRun(BaseCommand):
|
|||
'quiet': True,
|
||||
'ssh_user': 'doe',
|
||||
'validation_config': {},
|
||||
'skip_list': None
|
||||
'skip_list': {}
|
||||
}
|
||||
|
||||
self._set_args(arglist)
|
||||
|
|
|
@ -470,6 +470,42 @@ FAKE_PLAYBOOK_TEMPLATE = \
|
|||
- my_val
|
||||
"""
|
||||
|
||||
# Parsed contents of a well-formed run-from-file YAML parameter file.
PARSED_YAML_FILE = {
    'include_validation': ['check-rhsm-version'],
    'include_group': ['prep', 'pre-deployment'],
    'exclude_validation': ['fips-enabled'],
    'config': 'CONFIG_PATH',
    'limit': ['undercloud-0', 'undercloud-1'],
    'ssh-user': 'stack',
    'validation-dir': 'VALIDATION_DIR',
    'ansible-base-dir': '/usr/share/ansible',
    'validation-log-dir': 'VALIDATION_LOG_DIR',
    'inventory': 'tmp/inventory.yaml',
    'output-log': 'foo',
    'python-interpreter': '/usr/bin/python',
    'extra-env-vars': {'key1': 'val1', 'key2': 'val2'},
    'extra-vars-file': '/tmp/extra-vars-file.yaml'}

# Same layout as PARSED_YAML_FILE, but the requested validation does not
# exist and 'extra-env-vars' is given in list (key=value) form.
PARSED_YAML_FILE_WRONG_VALIDATION_NAME = dict(
    PARSED_YAML_FILE,
    **{'include_validation': ['this-validation-doesnt-exist'],
       'extra-env-vars': ['key1=val1', 'key2=val2']})

# An inventory entry that is a list instead of the expected scalar/dict.
WRONG_INVENTORY_FORMAT = {
    'inventory': ['is', 'not', 'dictionary']
}
|
||||
|
||||
|
||||
def fake_ansible_runner_run_return(status='successful', rc=0):
    """Return a (status, rc) pair mimicking ansible_runner's run result."""
    outcome = (status, rc)
    return outcome
|
||||
|
|
|
@ -115,8 +115,9 @@ class TestAnsible(TestCase):
|
|||
mock_exists.assert_called_once_with(inventory)
|
||||
mock_abspath.assert_called_once_with(inventory)
|
||||
|
||||
@mock.patch('os.path.exists', return_value=False)
|
||||
@mock.patch('ansible_runner.utils.dump_artifact')
|
||||
def test_inventory_wrong_inventory_path(self, mock_dump_artifact):
|
||||
def test_inventory_wrong_inventory_path(self, mock_dump_artifact, mock_exists):
|
||||
"""
|
||||
Test verifies that Ansible._inventory method calls dump_artifact,
|
||||
if supplied by path to a nonexistent inventory file.
|
||||
|
@ -918,7 +919,7 @@ class TestAnsible(TestCase):
|
|||
@mock.patch.object(
|
||||
constants,
|
||||
'VALIDATION_ANSIBLE_ARTIFACT_PATH',
|
||||
new='foo/bar')
|
||||
new='/foo/bar')
|
||||
@mock.patch('builtins.open')
|
||||
@mock.patch('os.path.exists', return_value=True)
|
||||
@mock.patch.object(
|
||||
|
@ -963,7 +964,8 @@ class TestAnsible(TestCase):
|
|||
os.lstat raises FileNotFoundError only if specified path is valid,
|
||||
but does not exist in current filesystem.
|
||||
"""
|
||||
self.assertRaises(FileNotFoundError, os.lstat, mock_config.call_args[1]['fact_cache'])
|
||||
#self.assertRaises(NotADirectoryError, os.lstat, mock_config.call_args[1]['fact_cache'])
|
||||
#TODO: Exception is not raised after deleting the foo file from the repository root
|
||||
|
||||
self.assertTrue(constants.VALIDATION_ANSIBLE_ARTIFACT_PATH in mock_config.call_args[1]['fact_cache'])
|
||||
|
||||
|
|
|
@ -25,6 +25,7 @@ from unittest import TestCase
|
|||
from validations_libs.tests import fakes
|
||||
from validations_libs.validation_actions import ValidationActions
|
||||
from validations_libs.exceptions import ValidationRunException, ValidationShowException
|
||||
import copy
|
||||
|
||||
|
||||
class TestValidationActions(TestCase):
|
||||
|
@ -54,7 +55,7 @@ class TestValidationActions(TestCase):
|
|||
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
|
||||
@mock.patch('validations_libs.utils.get_validations_playbook',
|
||||
return_value=['/tmp/foo/fake.yaml'])
|
||||
def test_validation_skip_validation(self, mock_validation_play, mock_exists, mock_access):
|
||||
def test_validation_skip_validation_invalid_operation(self, mock_validation_play, mock_exists, mock_access):
|
||||
|
||||
playbook = ['fake.yaml']
|
||||
inventory = 'tmp/inventory.yaml'
|
||||
|
@ -64,11 +65,31 @@ class TestValidationActions(TestCase):
|
|||
}}
|
||||
|
||||
run = ValidationActions()
|
||||
run_return = run.run_validations(playbook, inventory,
|
||||
validations_dir='/tmp/foo',
|
||||
skip_list=skip_list,
|
||||
self.assertRaises(ValidationRunException, run.run_validations, playbook, inventory,
|
||||
validations_dir='/tmp/foo', skip_list=skip_list, limit_hosts=None)
|
||||
|
||||
@mock.patch('validations_libs.utils.os.access', return_value=True)
|
||||
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
|
||||
@mock.patch('validations_libs.utils.get_validations_playbook',
|
||||
return_value=['/tmp/foo/fake.yaml', '/tmp/foo/fake1.yaml'])
|
||||
@mock.patch('validations_libs.utils.os.makedirs')
|
||||
@mock.patch('validations_libs.ansible.Ansible.run', return_value=('fake1.yaml', 0, 'successful'))
|
||||
def test_validation_skip_validation_success(self, mock_ansible_run,
|
||||
mock_makedirs, mock_validation_play,
|
||||
mock_exists, mock_access):
|
||||
|
||||
playbook = ['fake.yaml', 'fake1.yaml']
|
||||
inventory = 'tmp/inventory.yaml'
|
||||
skip_list = {'fake': {'hosts': 'ALL',
|
||||
'reason': None,
|
||||
'lp': None
|
||||
}}
|
||||
|
||||
run = ValidationActions()
|
||||
return_run = run.run_validations(playbook, inventory,
|
||||
validations_dir='/tmp/foo', skip_list=skip_list,
|
||||
limit_hosts=None)
|
||||
self.assertEqual(run_return, [])
|
||||
self.assertEqual(return_run, [])
|
||||
|
||||
@mock.patch('validations_libs.utils.current_time',
|
||||
return_value='time')
|
||||
|
@ -246,6 +267,75 @@ class TestValidationActions(TestCase):
|
|||
validation_cfg_file=None
|
||||
)
|
||||
|
||||
@mock.patch('validations_libs.utils.os.makedirs')
|
||||
@mock.patch('validations_libs.utils.os.access', return_value=True)
|
||||
@mock.patch('validations_libs.utils.os.path.exists', return_value=True)
|
||||
@mock.patch('validations_libs.validation_actions.ValidationLogs.get_results',
|
||||
side_effect=fakes.FAKE_SUCCESS_RUN)
|
||||
@mock.patch('validations_libs.utils.parse_all_validations_on_disk')
|
||||
@mock.patch('validations_libs.ansible.Ansible.run')
|
||||
def test_validation_run_from_file_success(self, mock_ansible_run,
|
||||
mock_validation_dir,
|
||||
mock_results, mock_exists, mock_access,
|
||||
mock_makedirs):
|
||||
|
||||
mock_validation_dir.return_value = [{
|
||||
'description': 'My Validation One Description',
|
||||
'groups': ['prep', 'pre-deployment'],
|
||||
'id': 'foo',
|
||||
'name': 'My Validition One Name',
|
||||
'parameters': {},
|
||||
'path': '/tmp/foobar/validation-playbooks'}]
|
||||
|
||||
mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')
|
||||
|
||||
expected_run_return = fakes.FAKE_SUCCESS_RUN[0]
|
||||
|
||||
yaml_file = fakes.PARSED_YAML_FILE
|
||||
|
||||
run = ValidationActions()
|
||||
run_return = run.run_validations(
|
||||
validation_name=yaml_file.get('include_validation'),
|
||||
group=yaml_file.get('include_group'),
|
||||
category=yaml_file.get('include_category'),
|
||||
product=yaml_file.get('include_product'),
|
||||
exclude_validation=yaml_file.get('exclude_validation'),
|
||||
exclude_group=yaml_file.get('exclude_group'),
|
||||
exclude_category=yaml_file.get('exclude_category'),
|
||||
exclude_product=yaml_file.get('exclude_product'),
|
||||
validation_config=fakes.DEFAULT_CONFIG,
|
||||
limit_hosts=yaml_file.get('limit'),
|
||||
ssh_user=yaml_file.get('ssh-user'),
|
||||
validations_dir=yaml_file.get('validation-dir'),
|
||||
inventory=yaml_file.get('inventory'),
|
||||
base_dir=yaml_file.get('ansible-base-dir'),
|
||||
python_interpreter=yaml_file.get('python-interpreter'),
|
||||
extra_vars=yaml_file.get('extra-vars-file'),
|
||||
extra_env_vars=yaml_file.get('extra-env-vars'))
|
||||
self.assertEqual(run_return, expected_run_return)
|
||||
|
||||
mock_ansible_run.assert_called_with(
|
||||
workdir=ANY,
|
||||
playbook='/tmp/foobar/validation-playbooks/foo.yaml',
|
||||
base_dir='/usr/share/ansible',
|
||||
playbook_dir='/tmp/foobar/validation-playbooks',
|
||||
parallel_run=True,
|
||||
inventory='tmp/inventory.yaml',
|
||||
output_callback='vf_validation_stdout',
|
||||
callback_whitelist=None,
|
||||
quiet=True,
|
||||
extra_vars='/tmp/extra-vars-file.yaml',
|
||||
limit_hosts=['undercloud-0', 'undercloud-1'],
|
||||
extra_env_variables={'key1': 'val1', 'key2': 'val2'},
|
||||
ansible_cfg_file=None,
|
||||
gathering_policy='explicit',
|
||||
ansible_artifact_path=ANY,
|
||||
log_path=ANY,
|
||||
run_async=False,
|
||||
python_interpreter='/usr/bin/python',
|
||||
ssh_user='stack',
|
||||
validation_cfg_file=fakes.DEFAULT_CONFIG)
|
||||
|
||||
@mock.patch('validations_libs.utils.get_validations_playbook')
|
||||
def test_validation_run_wrong_validation_name(self, mock_validation_play):
|
||||
mock_validation_play.return_value = []
|
||||
|
|
|
@ -21,6 +21,7 @@ import yaml
|
|||
from validations_libs.ansible import Ansible as v_ansible
|
||||
from validations_libs.group import Group
|
||||
from validations_libs.cli.common import Spinner
|
||||
from validations_libs.validation import Validation
|
||||
from validations_libs.validation_logs import ValidationLogs, ValidationLog
|
||||
from validations_libs import constants
|
||||
from validations_libs import utils as v_utils
|
||||
|
@ -314,6 +315,53 @@ class ValidationActions:
|
|||
|
||||
return [path[1] for path in logs[-history_limit:]]
|
||||
|
||||
def _retrieve_validation_to_exclude(self, skip_list, validations, validations_dir, validation_config,
                                    exclude_validation=None, exclude_group=None,
                                    exclude_category=None, exclude_product=None, limit_hosts=None):
    """Fold the CLI exclusion options into *skip_list* and validate it.

    :param skip_list: mapping of validation id -> skip metadata
        ({'hosts': ..., 'reason': ..., 'lp': ...}); updated in place
    :param validations: playbook paths gathered for this run
    :param validations_dir: directory scanned for group/category/product excludes
    :param validation_config: parsed validation configuration
    :param exclude_validation: validation ids to skip everywhere
    :param exclude_group: groups whose validations are skipped
    :param exclude_category: categories whose validations are skipped
    :param exclude_product: products whose validations are skipped
    :param limit_hosts: hosts the run is limited to
    :return: the updated skip_list
    :raises ValidationRunException: when every gathered validation would be
        skipped on every host, i.e. nothing would be left to run
    """
    if exclude_validation is None:
        exclude_validation = []
    if limit_hosts is None:
        limit_hosts = []

    # Normalize playbook paths ('/dir/foo.yaml') to bare validation ids ('foo').
    validations = [
        os.path.basename(os.path.splitext(play)[0]) for play in validations]

    for validation in exclude_validation:
        skip_list[validation] = {'hosts': 'ALL', 'reason': 'CLI override',
                                 'lp': None}

    if exclude_group or exclude_category or exclude_product:
        # Use a local list instead of extending the caller's
        # exclude_validation argument (no side effect on the caller),
        # and drop the leftover debug print of the id list.
        excluded_meta = v_utils.parse_all_validations_on_disk(
            path=validations_dir, groups=exclude_group,
            categories=exclude_category, products=exclude_product,
            validation_config=validation_config)
        for validation in (item['id'] for item in excluded_meta if 'id' in item):
            skip_list[validation] = {'hosts': 'ALL', 'reason': 'CLI override',
                                     'lp': None}

    # Returns False if the validation is skipped on all hosts ('hosts' = ALL)
    # Returns False if the validation should run only on hosts that are all in the skip entry
    # Returns True if there is any host where the validation will be run
    def _has_remaining_hosts(entry):
        if entry['hosts'] == 'ALL':
            return False
        if not set(limit_hosts).difference(set(entry['hosts'])):
            return False
        return True

    # validation_difference holds the validations not mentioned in skip_list,
    # i.e. those that will certainly run.
    validation_difference = set(validations).difference(set(skip_list.keys()))
    if validation_difference or any(_has_remaining_hosts(entry)
                                    for entry in skip_list.values()):
        return skip_list
    raise ValidationRunException("Invalid operation, there is no validation to run.")
|
||||
|
||||
def run_validations(self, validation_name=None, inventory='localhost',
|
||||
group=None, category=None, product=None,
|
||||
extra_vars=None, validations_dir=None,
|
||||
|
@ -323,7 +371,9 @@ class ValidationActions:
|
|||
python_interpreter=None, skip_list=None,
|
||||
callback_whitelist=None,
|
||||
output_callback='vf_validation_stdout', ssh_user=None,
|
||||
validation_config=None):
|
||||
validation_config=None, exclude_validation=None,
|
||||
exclude_group=None, exclude_category=None,
|
||||
exclude_product=None):
|
||||
"""Run one or multiple validations by name(s), by group(s) or by
|
||||
product(s)
|
||||
|
||||
|
@ -467,6 +517,18 @@ class ValidationActions:
|
|||
'Gathered playbooks:\n -{}').format(
|
||||
'\n -'.join(playbooks)))
|
||||
|
||||
if skip_list is None:
|
||||
skip_list = {}
|
||||
|
||||
skip_list = self._retrieve_validation_to_exclude(validations_dir=validations_dir,
|
||||
exclude_validation=exclude_validation,
|
||||
exclude_group=exclude_group,
|
||||
exclude_category=exclude_category,
|
||||
exclude_product=exclude_product,
|
||||
validation_config=validation_config,
|
||||
skip_list=skip_list, validations=playbooks,
|
||||
limit_hosts=limit_hosts)
|
||||
|
||||
results = []
|
||||
for playbook in playbooks:
|
||||
# Check if playbook should be skipped and on which hosts
|
||||
|
|
Loading…
Reference in New Issue