Merge pull request #6 from matbu/dev/run/validations

Dev/run/validations
This commit is contained in:
Gaël Chamoulaud 2020-03-09 14:48:40 +01:00 committed by GitHub
commit c3ce36dabf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 825 additions and 9 deletions

View File

@ -4,3 +4,5 @@
pbr>=3.1.1 # Apache-2.0
six>=1.11.0 # MIT
ansible-runner

View File

@ -4,7 +4,7 @@
openstackdocstheme>=1.20.0 # Apache-2.0
hacking<0.12,>=0.11.0 # Apache-2.0
mock
coverage!=4.4,>=4.0 # Apache-2.0
python-subunit>=1.0.0 # Apache-2.0/BSD
sphinx>=1.8.0,<2.0.0;python_version=='2.7' # BSD

354
validations_libs/ansible.py Normal file
View File

@ -0,0 +1,354 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import ansible_runner
import logging
import pwd
import os
import six
import sys
import tempfile
import uuid
import yaml
from six.moves import configparser
from validations_libs import constants
from validations_libs import utils
# Module-level logger for this file.
LOG = logging.getLogger(__name__ + ".ansible")

# NOTE(cloudnull): This is setting the FileExistsError for py2 environments.
#                  When we no longer support py2 (centos7) this should be
#                  removed.
try:
    FileExistsError = FileExistsError
except NameError:
    # Python 2 has no FileExistsError; os.makedirs raises OSError there.
    FileExistsError = OSError
class Ansible(object):
    """Thin wrapper around ansible-runner to execute validation playbooks."""

    def __init__(self):
        self.log = logging.getLogger(__name__ + ".Ansible")

    def _playbook_check(self, play, playbook_dir=None):
        """Return the path of an existing playbook.

        :param play: playbook name or full path
        :param playbook_dir: directory searched when ``play`` is not an
                             existing path on its own
        :raises RuntimeError: when the playbook cannot be found
        """
        if not os.path.exists(play):
            play = os.path.join(playbook_dir, play)
            if not os.path.exists(play):
                raise RuntimeError('No such playbook: {}'.format(play))
        self.log.debug('Ansible playbook {} found'.format(play))
        return play

    def _inventory(self, inventory, ansible_artifact_path):
        """Resolve the inventory to something ansible-runner can consume.

        Returns the inventory unchanged when it is an existing file path;
        otherwise dumps it (a host string or a dict rendered to YAML) into
        a ``hosts`` artifact file. Returns None when no inventory is given.
        """
        if inventory:
            if isinstance(inventory, six.string_types):
                # check is file path
                if os.path.exists(inventory):
                    return inventory
            elif isinstance(inventory, dict):
                inventory = yaml.safe_dump(
                    inventory,
                    default_flow_style=False
                )
            return ansible_runner.utils.dump_artifact(
                inventory,
                ansible_artifact_path,
                'hosts'
            )

    def _creates_ansible_fact_dir(self,
                                  temp_suffix='validations-libs-ansible'):
        """Create the Ansible fact cache directory and return its path.

        :param temp_suffix: sub-directory name created under the system
                            temp directory (fixed typo: was
                            'validagions-libs-ansible')
        """
        ansible_fact_path = os.path.join(
            os.path.join(
                tempfile.gettempdir(),
                temp_suffix
            ),
            'fact_cache'
        )
        try:
            os.makedirs(ansible_fact_path)
        except FileExistsError:
            self.log.debug(
                'Directory "{}" was not created because it'
                ' already exists.'.format(
                    ansible_fact_path
                )
            )
        # Always return the path: previously nothing was returned when the
        # directory already existed, which handed a None fact_cache to the
        # runner on every call after the first.
        return ansible_fact_path

    def _get_extra_vars(self, extra_vars):
        """Normalize extra_vars (dict or YAML file path) into a dict."""
        extravars = dict()
        if extra_vars:
            if isinstance(extra_vars, dict):
                extravars.update(extra_vars)
            elif os.path.exists(extra_vars) and os.path.isfile(extra_vars):
                with open(extra_vars) as f:
                    extravars.update(yaml.safe_load(f.read()))
        return extravars

    def _callback_whitelist(self, callback_whitelist, output_callback):
        """Return the callback whitelist including the output callback.

        'profile_tasks' is always appended so task timing is reported.
        """
        if callback_whitelist:
            callback_whitelist = ','.join([callback_whitelist,
                                           output_callback])
        else:
            callback_whitelist = output_callback
        return ','.join([callback_whitelist, 'profile_tasks'])

    def _ansible_env_var(self, output_callback, ssh_user, workdir, connection,
                         gathering_policy, module_path, key,
                         extra_env_variables, ansible_timeout,
                         callback_whitelist):
        """Build the environment dict for the Ansible execution.

        Starts from a copy of the current process environment and layers
        SSH options, plugin search paths, logging and cache settings on
        top of it.

        :raises SystemError: when extra_env_variables is not a dict
        """
        cwd = os.getcwd()
        env = os.environ.copy()
        env['ANSIBLE_SSH_ARGS'] = (
            '-o UserKnownHostsFile={} '
            '-o StrictHostKeyChecking=no '
            '-o ControlMaster=auto '
            '-o ControlPersist=30m '
            '-o ServerAliveInterval=64 '
            '-o ServerAliveCountMax=1024 '
            '-o Compression=no '
            '-o TCPKeepAlive=yes '
            '-o VerifyHostKeyDNS=no '
            '-o ForwardX11=no '
            '-o ForwardAgent=yes '
            '-o PreferredAuthentications=publickey '
            '-T'
        ).format(os.devnull)
        # Non-string values are converted later by _encode_envvars() before
        # the environment is handed to ansible-runner.
        env['ANSIBLE_DISPLAY_FAILED_STDERR'] = True
        env['ANSIBLE_FORKS'] = 36
        env['ANSIBLE_TIMEOUT'] = ansible_timeout
        env['ANSIBLE_GATHER_TIMEOUT'] = 45
        env['ANSIBLE_SSH_RETRIES'] = 3
        env['ANSIBLE_PIPELINING'] = True
        env['ANSIBLE_REMOTE_USER'] = ssh_user
        env['ANSIBLE_STDOUT_CALLBACK'] = output_callback
        env['ANSIBLE_LIBRARY'] = os.path.expanduser(
            '~/.ansible/plugins/modules:'
            '{}:{}:'
            '/usr/share/ansible/plugins/modules:'
            '/usr/share/ceph-ansible/library:'
            '{}/library'.format(
                os.path.join(workdir, 'modules'),
                os.path.join(cwd, 'modules'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_LOOKUP_PLUGINS'] = os.path.expanduser(
            '~/.ansible/plugins/lookup:'
            '{}:{}:'
            '/usr/share/ansible/plugins/lookup:'
            '/usr/share/ceph-ansible/plugins/lookup:'
            '{}/lookup_plugins'.format(
                os.path.join(workdir, 'lookup'),
                os.path.join(cwd, 'lookup'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_CALLBACK_PLUGINS'] = os.path.expanduser(
            '~/.ansible/plugins/callback:'
            '{}:{}:'
            '/usr/share/ansible/plugins/callback:'
            '/usr/share/ceph-ansible/plugins/callback:'
            '{}/callback_plugins'.format(
                os.path.join(workdir, 'callback'),
                os.path.join(cwd, 'callback'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_ACTION_PLUGINS'] = os.path.expanduser(
            '~/.ansible/plugins/action:'
            '{}:{}:'
            '/usr/share/ansible/plugins/action:'
            '/usr/share/ceph-ansible/plugins/actions:'
            '{}/action_plugins'.format(
                os.path.join(workdir, 'action'),
                os.path.join(cwd, 'action'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_FILTER_PLUGINS'] = os.path.expanduser(
            '~/.ansible/plugins/filter:'
            '{}:{}:'
            '/usr/share/ansible/plugins/filter:'
            '/usr/share/ceph-ansible/plugins/filter:'
            '{}/filter_plugins'.format(
                os.path.join(workdir, 'filter'),
                os.path.join(cwd, 'filter'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_ROLES_PATH'] = os.path.expanduser(
            '~/.ansible/roles:'
            '{}:{}:'
            '/usr/share/ansible/roles:'
            '/usr/share/ceph-ansible/roles:'
            '/etc/ansible/roles:'
            '{}/roles'.format(
                os.path.join(workdir, 'roles'),
                os.path.join(cwd, 'roles'),
                constants.DEFAULT_VALIDATIONS_BASEDIR
            )
        )
        env['ANSIBLE_CALLBACK_WHITELIST'] = callback_whitelist
        env['ANSIBLE_RETRY_FILES_ENABLED'] = False
        env['ANSIBLE_HOST_KEY_CHECKING'] = False
        env['ANSIBLE_TRANSPORT'] = connection
        env['ANSIBLE_CACHE_PLUGIN_TIMEOUT'] = 7200
        if connection == 'local':
            env['ANSIBLE_PYTHON_INTERPRETER'] = sys.executable
        if gathering_policy in ('smart', 'explicit', 'implicit'):
            env['ANSIBLE_GATHERING'] = gathering_policy
        if module_path:
            env['ANSIBLE_LIBRARY'] = ':'.join(
                [env['ANSIBLE_LIBRARY'], module_path]
            )
        # Log under the invoking user's home even when running under sudo.
        try:
            user_pwd = pwd.getpwuid(int(os.getenv('SUDO_UID', os.getuid())))
        except TypeError:
            home = os.path.expanduser('~')
        else:
            home = user_pwd.pw_dir
        env['ANSIBLE_LOG_PATH'] = os.path.join(home, 'ansible.log')
        if key:
            env['ANSIBLE_PRIVATE_KEY_FILE'] = key
        if extra_env_variables:
            if not isinstance(extra_env_variables, dict):
                msg = "extra_env_variables must be a dict"
                self.log.error(msg)
                raise SystemError(msg)
            else:
                env.update(extra_env_variables)
        return env

    def _encode_envvars(self, env):
        """Encode a hash of values.

        :param env: A hash of key=value items.
        :type env: `dict`.
        """
        for key, value in env.items():
            env[key] = six.text_type(value)
        return env

    def run(self, playbook, inventory, workdir, playbook_dir=None,
            connection='smart', output_callback='yaml',
            ssh_user='root', key=None, module_path=None,
            limit_hosts=None, tags=None, skip_tags=None,
            verbosity=0, quiet=False, extra_vars=None,
            gathering_policy='smart',
            extra_env_variables=None, parallel_run=False,
            callback_whitelist=None, ansible_cfg=None,
            ansible_timeout=30):
        """Execute one playbook through ansible-runner.

        :param playbook: playbook name or path (resolved against
                         ``playbook_dir``)
        :param inventory: inventory path, host string or dict
        :param workdir: ansible-runner private data directory
        :returns: tuple (stdout file name, playbook, rc, status)
        :raises RuntimeError: when the playbook cannot be found
        """
        if not playbook_dir:
            playbook_dir = workdir
        playbook = self._playbook_check(playbook, playbook_dir)
        self.log.info(
            'Running Ansible playbook: {},'
            ' Working directory: {},'
            ' Playbook directory: {}'.format(
                playbook,
                workdir,
                playbook_dir
            )
        )
        ansible_fact_path = self._creates_ansible_fact_dir()
        extravars = self._get_extra_vars(extra_vars)
        callback_whitelist = self._callback_whitelist(callback_whitelist,
                                                      output_callback)
        # Set ansible environment variables
        env = self._ansible_env_var(output_callback, ssh_user, workdir,
                                    connection, gathering_policy, module_path,
                                    key, extra_env_variables, ansible_timeout,
                                    callback_whitelist)
        with utils.TempDirs(dir_path=constants.VALIDATION_RUN_LOG_PATH,
                            chdir=False,) as ansible_artifact_path:
            # Generate a minimal ansible.cfg when the caller supplies
            # neither an ANSIBLE_CONFIG env var nor an explicit file.
            if 'ANSIBLE_CONFIG' not in env and not ansible_cfg:
                ansible_cfg = os.path.join(ansible_artifact_path,
                                           'ansible.cfg')
                config = configparser.ConfigParser()
                config.add_section('defaults')
                config.set('defaults', 'internal_poll_interval', '0.05')
                with open(ansible_cfg, 'w') as f:
                    config.write(f)
                env['ANSIBLE_CONFIG'] = ansible_cfg
            elif 'ANSIBLE_CONFIG' not in env and ansible_cfg:
                env['ANSIBLE_CONFIG'] = ansible_cfg

            r_opts = {
                'private_data_dir': workdir,
                'project_dir': playbook_dir,
                'inventory': self._inventory(inventory, ansible_artifact_path),
                'envvars': self._encode_envvars(env=env),
                'playbook': playbook,
                'verbosity': verbosity,
                'quiet': quiet,
                'extravars': extravars,
                'fact_cache': ansible_fact_path,
                'fact_cache_type': 'jsonfile',
                'artifact_dir': ansible_artifact_path,
                'rotate_artifacts': 256
            }

            if skip_tags:
                r_opts['skip_tags'] = skip_tags
            if tags:
                r_opts['tags'] = tags
            if limit_hosts:
                r_opts['limit'] = limit_hosts
            if parallel_run:
                r_opts['directory_isolation_base_path'] = ansible_artifact_path

            runner_config = ansible_runner.runner_config.RunnerConfig(**r_opts)
            runner_config.prepare()
            # NOTE(cloudnull): overload the output callback after prepare
            #                  to define the specific format we want.
            #                  This is only required until PR
            #                  https://github.com/ansible/ansible-runner/pull/387
            #                  is merged and released. After this PR has been
            #                  made available to us, this line should be
            #                  removed.
            runner_config.env['ANSIBLE_STDOUT_CALLBACK'] = \
                r_opts['envvars']['ANSIBLE_STDOUT_CALLBACK']
            runner = ansible_runner.Runner(config=runner_config)

            status, rc = runner.run()
        return runner.stdout.name, playbook, rc, status

View File

@ -12,8 +12,6 @@
# License for the specific language governing permissions and limitations
# under the License.
#
DEFAULT_VALIDATIONS_BASEDIR = '/usr/share/validations-common'
ANSIBLE_VALIDATION_DIR = '/usr/share/validations-common/playbooks'
@ -23,3 +21,5 @@ VALIDATION_GROUPS_INFO = '%s/groups.yaml' % DEFAULT_VALIDATIONS_BASEDIR
VALIDATION_GROUPS = ['no-op',
                     'prep',
                     'post']
VALIDATION_RUN_LOG_PATH = '/var/lib/validations/logs'

View File

@ -0,0 +1,88 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import logging
import os
import six
from validations_libs.ansible import Ansible as v_ansible
from validations_libs import constants
from validations_libs import utils as v_utils
LOG = logging.getLogger(__name__ + ".run")
class Run(object):
    """Entry point to execute validation playbooks by group or by name."""

    def __init__(self):
        self.log = logging.getLogger(__name__ + ".Run")

    def run_validations(self, playbook, inventory,
                        group=None, extra_vars=None, validations_dir=None,
                        validation_name=None, extra_env_var=None,
                        ansible_cfg=None, quiet=True):
        """Run validations and collect their results.

        The playbooks executed are derived from ``group`` (all validations
        of that group found on disk) or ``validation_name`` (explicit
        validation ids).

        :returns: list of dicts, one per playbook, with keys
                  ``playbook``, ``rc_code``, ``status`` and ``stdout_file``
        :raises RuntimeError: when neither a group nor validation names are
                              supplied, or a group name is passed through
                              ``validation_name``
        """
        self.log = logging.getLogger(__name__ + ".run_validations")
        playbooks = []
        if group:
            self.log.debug('Getting the validations list by group')
            try:
                validations = v_utils.parse_all_validations_on_disk(
                    (validations_dir if validations_dir
                     else constants.ANSIBLE_VALIDATION_DIR), group)
                for val in validations:
                    playbooks.append(val.get('id') + '.yaml')
            except Exception:
                # Re-raise untouched to keep the original traceback.
                raise
        elif validation_name:
            for pb in validation_name:
                if pb not in v_utils.get_validation_group_name_list():
                    playbooks.append(pb + '.yaml')
                else:
                    # NOTE: the original raised a plain string, which is a
                    # TypeError in Python 3; raise a real exception instead.
                    raise RuntimeError(
                        "Please, use '--group' argument instead of "
                        "'--validation' to run validation(s) by their "
                        "name(s)."
                    )
        else:
            raise RuntimeError("No validations found")

        run_ansible = v_ansible()
        self.log.debug('Running the validations with Ansible')
        results = []
        with v_utils.TempDirs(chdir=False) as tmp:
            for playbook in playbooks:
                stdout_file, _playbook, _rc, _status = run_ansible.run(
                    workdir=tmp,
                    playbook=playbook,
                    playbook_dir=(validations_dir if
                                  validations_dir else
                                  constants.ANSIBLE_VALIDATION_DIR),
                    parallel_run=True,
                    inventory=inventory,
                    output_callback='validation_json',
                    quiet=quiet,
                    extra_vars=extra_vars,
                    extra_env_variables=extra_env_var,
                    ansible_cfg=ansible_cfg,
                    gathering_policy='explicit')
                results.append({'validation': {
                    'playbook': _playbook,
                    'rc_code': _rc,
                    'status': _status,
                    'stdout_file': stdout_file
                }})
        return results

View File

@ -13,6 +13,7 @@
# under the License.
#

from unittest import mock

VALIDATIONS_LIST = [{
    'description': 'My Validation One Description',
@ -137,3 +138,7 @@ VALIDATIONS_LOGS_CONTENTS_LIST = [{
    },
    'validation_output': []
}]
def fake_ansible_runner_run_return(status='successful', rc=0):
    """Mimic ansible_runner.Runner.run() by returning a (status, rc) pair."""
    return (status, rc)

View File

@ -0,0 +1,136 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from unittest import TestCase
from ansible_runner import Runner
from validations_libs.ansible import Ansible
from validations_libs.tests import fakes
from validations_libs import utils
class TestAnsible(TestCase):
    """Unit tests for validations_libs.ansible.Ansible.run().

    NOTE: mock.patch decorators are applied bottom-up, so the last
    decorator maps to the first mock argument of each test method.
    All asserts use assertEqual; the deprecated assertEquals alias was
    removed in Python 3.12.
    """

    def setUp(self):
        super(TestAnsible, self).setUp()
        # Prevent the runner's artifact cleanup from touching real files.
        self.unlink_patch = mock.patch('os.unlink')
        self.addCleanup(self.unlink_patch.stop)
        self.unlink_patch.start()
        self.run = Ansible()

    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value="/foo/inventory.yaml")
    def test_check_no_playbook(self, mock_dump_artifact, mock_exists):
        # A missing playbook must raise before anything is executed.
        self.assertRaises(
            RuntimeError,
            self.run.run,
            'non-existing.yaml',
            'localhost,',
            '/tmp'
        )
        mock_exists.assert_called_with('/tmp/non-existing.yaml')

    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('os.makedirs')
    @mock.patch.object(
        Runner,
        'run',
        return_value=fakes.fake_ansible_runner_run_return(rc=1,
                                                          status='failed')
    )
    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value="/foo/inventory.yaml")
    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
                return_value="/tmp/foo.yaml")
    def test_ansible_runner_error(self, mock_stdout, mock_dump_artifact,
                                  mock_run, mock_mkdirs, mock_exists,
                                  mock_mkdtemp):
        # A failed runner result must be propagated unchanged.
        stdout_file, _playbook, _rc, _status = self.run.run('existing.yaml',
                                                            'localhost,',
                                                            '/tmp')
        self.assertEqual((_playbook, _rc, _status),
                         ('existing.yaml', 1, 'failed'))

    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('os.makedirs')
    @mock.patch.object(Runner, 'run',
                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
                       )
    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value="/foo/inventory.yaml")
    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
                return_value="/tmp/foo.yaml")
    def test_run_success_default(self, mock_stdout, mock_dump_artifact,
                                 mock_run, mock_mkdirs, mock_exists,
                                 mock_mkstemp):
        # Happy path with default options.
        stdout_file, _playbook, _rc, _status = self.run.run(
            playbook='existing.yaml',
            inventory='localhost,',
            workdir='/tmp'
        )
        self.assertEqual((_playbook, _rc, _status),
                         ('existing.yaml', 0, 'successful'))

    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('os.makedirs')
    @mock.patch.object(Runner, 'run',
                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
                       )
    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value="/foo/inventory.yaml")
    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
                return_value="/tmp/foo.yaml")
    def test_run_success_gathering_policy(self, mock_stdout,
                                          mock_dump_artifact, mock_run,
                                          mock_mkdirs, mock_exists,
                                          mock_mkstemp):
        # Local connection combined with an explicit gathering policy.
        stdout_file, _playbook, _rc, _status = self.run.run(
            playbook='existing.yaml',
            inventory='localhost,',
            workdir='/tmp',
            connection='local',
            gathering_policy='smart'
        )
        self.assertEqual((_playbook, _rc, _status),
                         ('existing.yaml', 0, 'successful'))

    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch('os.makedirs')
    @mock.patch.object(Runner, 'run',
                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
                       )
    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value="/foo/inventory.yaml")
    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
                return_value="/tmp/foo.yaml")
    def test_run_success_local(self, mock_stdout,
                               mock_dump_artifact, mock_run,
                               mock_mkdirs, mock_exists,
                               mock_mkstemp):
        # Local connection with default gathering.
        stdout_file, _playbook, _rc, _status = self.run.run(
            playbook='existing.yaml',
            inventory='localhost,',
            workdir='/tmp',
            connection='local'
        )
        self.assertEqual((_playbook, _rc, _status),
                         ('existing.yaml', 0, 'successful'))

View File

@ -1,4 +1,4 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@ -13,14 +13,14 @@
# under the License.
#

from unittest import mock
from unittest import TestCase

from validations_libs.tests import fakes
from validations_libs.list import List


class TestValidatorList(TestCase):

    def setUp(self):
        super(TestValidatorList, self).setUp()

View File

@ -0,0 +1,90 @@
# Copyright 2020 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from unittest import TestCase
from validations_libs.tests import fakes
from validations_libs.run import Run
class TestValidatorRun(TestCase):
    """Unit tests for validations_libs.run.Run.run_validations."""

    def setUp(self):
        super(TestValidatorRun, self).setUp()

    # NOTE: mock.patch decorators are applied bottom-up, so the last
    # decorator maps to the first mock argument.
    @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
    @mock.patch('validations_libs.ansible.Ansible.run')
    def test_validation_run_success(self, mock_ansible_run,
                                    mock_validation_dir):
        # Fake one validation discovered on disk for the requested group.
        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',
            'groups': ['prep', 'pre-deployment'],
            'id': 'foo',
            'name': 'My Validition One Name',
            'parameters': {}}]
        # Ansible.run returns (stdout_file, playbook, rc, status).
        mock_ansible_run.return_value = ('/tmp/validation/stdout.log',
                                         'foo.yaml', 0, 'successful')

        expected_run_return = [
            {'validation': {'playbook': 'foo.yaml',
                            'rc_code': 0,
                            'status': 'successful',
                            'stdout_file': '/tmp/validation/stdout.log'}}]

        playbook = ['fake.yaml']
        inventory = 'tmp/inventory.yaml'

        run = Run()
        run_return = run.run_validations(playbook, inventory,
                                         group=fakes.GROUPS_LIST,
                                         validations_dir='/tmp/foo')
        self.assertEqual(run_return, expected_run_return)

    @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
    @mock.patch('validations_libs.ansible.Ansible.run')
    def test_validation_run_failed(self, mock_ansible_run,
                                   mock_validation_dir):
        # Same as the success case, but the runner reports 'failed'; the
        # failure must be reflected in the result dict, not raised.
        mock_validation_dir.return_value = [{
            'description': 'My Validation One Description',
            'groups': ['prep', 'pre-deployment'],
            'id': 'foo',
            'name': 'My Validition One Name',
            'parameters': {}}]
        mock_ansible_run.return_value = ('/tmp/validation/stdout.log',
                                         'foo.yaml', 0, 'failed')

        expected_run_return = [
            {'validation': {'playbook': 'foo.yaml',
                            'rc_code': 0,
                            'status': 'failed',
                            'stdout_file': '/tmp/validation/stdout.log'}}]

        playbook = ['fake.yaml']
        inventory = 'tmp/inventory.yaml'

        run = Run()
        run_return = run.run_validations(playbook, inventory,
                                         group=fakes.GROUPS_LIST,
                                         validations_dir='/tmp/foo')
        self.assertEqual(run_return, expected_run_return)

    def test_validation_run_no_validation(self):
        # Without a group or validation names, run_validations must raise.
        playbook = ['fake.yaml']
        inventory = 'tmp/inventory.yaml'

        run = Run()
        self.assertRaises(RuntimeError, run.run_validations, playbook,
                          inventory)

View File

@ -12,18 +12,119 @@
# License for the specific language governing permissions and limitations
# under the License.
#

import glob
import json
import logging
import os
import six
import shutil
import tempfile
import yaml

from validations_libs import constants
# ANSI escape sequences used to colorize validation results on a terminal.
RED = "\033[1;31m"
GREEN = "\033[0;32m"
RESET = "\033[0;0m"
# Pre-rendered colored status labels.
FAILED_VALIDATION = "{}FAILED{}".format(RED, RESET)
PASSED_VALIDATION = "{}PASSED{}".format(GREEN, RESET)
LOG = logging.getLogger(__name__ + ".utils")
class Pushd(object):
"""Simple context manager to change directories and then return."""
def __init__(self, directory):
"""This context manager will enter and exit directories.
>>> with Pushd(directory='/tmp'):
... with open('file', 'w') as f:
... f.write('test')
:param directory: path to change directory to
:type directory: `string`
"""
self.dir = directory
self.pwd = self.cwd = os.getcwd()
def __enter__(self):
os.chdir(self.dir)
self.cwd = os.getcwd()
return self
def __exit__(self, *args):
if self.pwd != self.cwd:
os.chdir(self.pwd)
class TempDirs(object):
    """Simple context manager to manage temp directories."""

    def __init__(self, dir_path=None, dir_prefix='validations', cleanup=True,
                 chdir=True):
        """Create, push, and clean up a temporary directory.

        >>> with TempDirs() as t:
        ...     with open('file', 'w') as f:
        ...         f.write('test')
        ...     print(t)
        ...     os.mkdir('testing')
        ...     with open(os.path.join(t, 'file')) as w:
        ...         print(w.read())
        ...     with open('testing/file', 'w') as f:
        ...         f.write('things')
        ...     with open(os.path.join(t, 'testing/file')) as w:
        ...         print(w.read())

        :param dir_path: path to create the temp directory
        :type dir_path: `string`
        :param dir_prefix: prefix to add to a temp directory
        :type dir_prefix: `string`
        :param cleanup: when enabled the temp directory will be
                        removed on exit.
        :type cleanup: `boolean`
        :param chdir: Change to/from the created temporary dir on
                      enter/exit.
        :type chdir: `boolean`
        """
        # NOTE(cloudnull): kwargs for tempfile.mkdtemp are created
        #                  because args are not processed correctly
        #                  in py2. When we drop py2 support (cent7)
        #                  these args can be removed and used directly
        #                  in the `tempfile.mkdtemp` function.
        mkdtemp_args = {}
        if dir_path:
            mkdtemp_args['dir'] = dir_path
        if dir_prefix:
            mkdtemp_args['prefix'] = dir_prefix
        self.dir = tempfile.mkdtemp(**mkdtemp_args)
        self.pushd = Pushd(directory=self.dir)
        self.cleanup = cleanup
        self.chdir = chdir

    def __enter__(self):
        if self.chdir:
            self.pushd.__enter__()
        return self.dir

    def __exit__(self, *args):
        if self.chdir:
            self.pushd.__exit__()
        if not self.cleanup:
            LOG.warning("Not cleaning temporary directory "
                        "[ %s ]" % self.dir)
            return
        self.clean()

    def clean(self):
        """Remove the temporary directory, ignoring missing files."""
        shutil.rmtree(self.dir, ignore_errors=True)
        LOG.info("Temporary directory [ %s ] cleaned up" % self.dir)
def parse_all_validations_on_disk(path, groups=None):
    results = []
    validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
@ -94,3 +195,43 @@ def get_validation_parameters(validation):
    except KeyError:
        LOG.debug("No parameters found for this validation")
    return dict()
def read_validation_groups_file(groups_file_path=None):
    """Load groups.yaml file and return a dictionary with its contents"""
    path = groups_file_path or constants.VALIDATION_GROUPS_INFO
    if not os.path.exists(path):
        return []
    with open(path, 'r') as grps:
        return yaml.safe_load(grps)
def get_validation_group_name_list():
    """Get the validation group name list only.

    Returns an empty list when the groups file is missing or does not
    contain a mapping.
    """
    results = []
    groups = read_validation_groups_file()
    # The original called isinstance(dict, groups) with the arguments
    # swapped, which raises TypeError whenever groups is a dict; the
    # check below is the intended one.
    if groups and isinstance(groups, dict):
        for grp_name in six.viewkeys(groups):
            results.append(grp_name)
    return results
def get_new_validations_logs_on_disk():
    """Return a list of new log execution filenames.

    A "new" log is a ``.json`` file whose name does not start with
    'processed'.
    """
    files = []
    for root, dirs, filenames in os.walk(constants.VALIDATIONS_LOG_BASEDIR):
        # Accumulate matches: the original re-assigned `files` on every
        # walked directory, keeping only the last directory's results.
        files.extend(
            f for f in filenames if not f.startswith('processed')
            and os.path.splitext(f)[1] == '.json'
        )
    return files