Convert AnsiblePlaybookAction to utility

It is no longer used as a Mistral action, since Mistral has been
removed from the undercloud.

Change-Id: I93b2c82e9451bd68955040f09ec9eb97db7c0098
ramishra 2021-02-28 10:13:40 +05:30
parent b0bcab90a2
commit 50a24d529b
5 changed files with 453 additions and 676 deletions
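
For context, call sites change roughly as follows. This is a minimal sketch based on the image_uploader hunk further down; the playbook, work_dir and extra_vars values are placeholders, not part of this change:

# Before: instantiate the Mistral action and invoke run() with a context
#   from tripleo_common.actions import ansible
#   ansible.AnsiblePlaybookAction(playbook=playbook, work_dir=work_dir,
#                                 verbosity=1).run(None)

# After: call the utility function directly
from tripleo_common.utils import ansible

result = ansible.run_ansible_playbook(
    playbook={'hosts': 'localhost', 'tasks': []},  # inline playbook or a path to one
    work_dir='/tmp/example-work-dir',  # an existing scratch dir (placeholder); omit to use a temp dir that is removed afterwards
    verbosity=1,
    extra_vars={'example_var': 'value'},
)
print(result['log_path'])  # the utility returns stdout, stderr and log_path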

@@ -12,525 +12,20 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import json
import logging
import multiprocessing
import os
import shutil
import six
from six.moves import configparser
from six.moves import cStringIO as StringIO
import sys
import tempfile
import time
import yaml
from mistral_lib import actions
from oslo_concurrency import processutils
from oslo_rootwrap import subprocess
from tripleo_common.actions import base
from tripleo_common import constants
LOG = logging.getLogger(__name__)
from tripleo_common.utils import ansible as ansible_utils
# FIXME(ramishra) Left behind till we have a new tripleo-common release
def write_default_ansible_cfg(work_dir,
remote_user,
ssh_private_key=None,
transport=None,
base_ansible_cfg='/etc/ansible/ansible.cfg',
override_ansible_cfg=None):
ansible_config_path = os.path.join(work_dir, 'ansible.cfg')
shutil.copy(base_ansible_cfg, ansible_config_path)
modules_path = (
'/root/.ansible/plugins/modules:'
'/usr/share/ansible/tripleo-plugins/modules:'
'/usr/share/ansible/plugins/modules:'
'/usr/share/ansible-modules:'
'{}/library:'
'{}/library'.format(constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
lookups_path = (
'/root/.ansible/plugins/lookup:'
'/usr/share/ansible/tripleo-plugins/lookup:'
'/usr/share/ansible/plugins/lookup:'
'{}/lookup_plugins:'
'{}/lookup_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
callbacks_path = (
'~/.ansible/plugins/callback:'
'/usr/share/ansible/tripleo-plugins/callback:'
'/usr/share/ansible/plugins/callback:'
'{}/callback_plugins:'
'{}/callback_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
callbacks_whitelist = ','.join(['tripleo_dense', 'tripleo_profile_tasks',
'tripleo_states'])
action_plugins_path = (
'~/.ansible/plugins/action:'
'/usr/share/ansible/plugins/action:'
'/usr/share/ansible/tripleo-plugins/action:'
'{}/action_plugins:'
'{}/action_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
filter_plugins_path = (
'~/.ansible/plugins/filter:'
'/usr/share/ansible/plugins/filter:'
'/usr/share/ansible/tripleo-plugins/filter:'
'{}/filter_plugins:'
'{}/filter_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
roles_path = ('%(work_dir)s/roles:'
'/root/.ansible/roles:'
'/usr/share/ansible/tripleo-roles:'
'/usr/share/ansible/roles:'
'/etc/ansible/roles:'
'%(ooo_val_path)s/roles:'
'%(work_dir)s' % {
'work_dir': work_dir,
'ooo_val_path':
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR
})
config = configparser.ConfigParser()
config.read(ansible_config_path)
config.set('defaults', 'retry_files_enabled', 'False')
config.set('defaults', 'roles_path', roles_path)
config.set('defaults', 'library', modules_path)
config.set('defaults', 'callback_plugins', callbacks_path)
config.set('defaults', 'callback_whitelist', callbacks_whitelist)
config.set('defaults', 'stdout_callback', 'tripleo_dense')
config.set('defaults', 'action_plugins', action_plugins_path)
config.set('defaults', 'lookup_plugins', lookups_path)
config.set('defaults', 'filter_plugins', filter_plugins_path)
log_path = os.path.join(work_dir, 'ansible.log')
config.set('defaults', 'log_path', log_path)
if os.path.exists(log_path):
new_path = (log_path + '-' +
datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
os.rename(log_path, new_path)
config.set('defaults', 'forks', str(min(
multiprocessing.cpu_count() * 4, 100)))
config.set('defaults', 'timeout', '30')
config.set('defaults', 'gather_timeout', '30')
# Setup fact cache to improve playbook execution speed
config.set('defaults', 'gathering', 'smart')
config.set('defaults', 'fact_caching', 'jsonfile')
config.set('defaults', 'fact_caching_connection',
'~/.ansible/fact_cache')
# NOTE(mwhahaha): only gather the bare minimum facts because this has
# direct impact on how fast ansible can go.
config.set('defaults', 'gather_subset', '!all,min')
# NOTE(mwhahaha): this significantly affects performance per ansible#73654
config.set('defaults', 'inject_facts_as_vars', 'false')
# Set the pull interval to lower CPU overhead
config.set('defaults', 'internal_poll_interval', '0.01')
# Set the interpreter discovery to auto mode.
config.set('defaults', 'interpreter_python', 'auto')
# Expire facts in the fact cache after 7200s (2h)
config.set('defaults', 'fact_caching_timeout', '7200')
# mistral user has no home dir set, so no place to save a known hosts file
config.set('ssh_connection', 'ssh_args',
'-o UserKnownHostsFile=/dev/null '
'-o StrictHostKeyChecking=no '
'-o ControlMaster=auto '
'-o ControlPersist=30m '
'-o ServerAliveInterval=5 '
'-o ServerAliveCountMax=5 '
'-o PreferredAuthentications=publickey')
config.set('ssh_connection', 'control_path_dir',
os.path.join(work_dir, 'ansible-ssh'))
config.set('ssh_connection', 'retries', '8')
config.set('ssh_connection', 'pipelining', 'True')
# Related to https://github.com/ansible/ansible/issues/22127
config.set('ssh_connection', 'scp_if_ssh', 'True')
# Set connection info in config file so that subsequent/nested ansible
# calls can re-use it
if remote_user:
config.set('defaults', 'remote_user', remote_user)
if ssh_private_key:
config.set('defaults', 'private_key_file', ssh_private_key)
if transport:
config.set('defaults', 'transport', transport)
if override_ansible_cfg:
sio_cfg = StringIO()
sio_cfg.write(override_ansible_cfg)
sio_cfg.seek(0)
config.read_file(sio_cfg)
sio_cfg.close()
with open(ansible_config_path, 'w') as configfile:
config.write(configfile)
return ansible_config_path
class AnsiblePlaybookAction(base.TripleOAction):
"""Executes ansible playbook"""
def __init__(self, **kwargs):
self._kwargs_for_run = kwargs
self._playbook = self._kwargs_for_run.pop('playbook', None)
self.playbook_name = self._kwargs_for_run.pop('playbook_name',
'playbook.yaml')
self.plan_name = self._kwargs_for_run.pop('plan_name', None)
self.limit_hosts = self._kwargs_for_run.pop('limit_hosts', None)
self.module_path = self._kwargs_for_run.pop('module_path', None)
self.remote_user = self._kwargs_for_run.pop('remote_user', None)
self.become = self._kwargs_for_run.pop('become', None)
self.become_user = self._kwargs_for_run.pop('become_user', None)
self.extra_vars = self._kwargs_for_run.pop('extra_vars', None)
if self.extra_vars:
self.extra_vars = json.dumps(self.extra_vars)
self._inventory = self._kwargs_for_run.pop('inventory', None)
self.verbosity = self._kwargs_for_run.pop('verbosity', 5)
self._ssh_private_key = self._kwargs_for_run.pop(
'ssh_private_key', None)
self.flush_cache = self._kwargs_for_run.pop('flush_cache', None)
self.forks = self._kwargs_for_run.pop('forks', None)
self.timeout = self._kwargs_for_run.pop('timeout', None)
self.ssh_extra_args = self._kwargs_for_run.pop('ssh_extra_args', None)
if self.ssh_extra_args:
self.ssh_extra_args = json.dumps(self.ssh_extra_args)
self.ssh_common_args = self._kwargs_for_run.pop(
'ssh_common_args', None)
if self.ssh_common_args:
self.ssh_common_args = json.dumps(self.ssh_common_args)
self.use_openstack_credentials = self._kwargs_for_run.pop(
'use_openstack_credentials', False)
self.tags = self._kwargs_for_run.pop('tags', None)
self.skip_tags = self._kwargs_for_run.pop('skip_tags', None)
self.config_download_args = self._kwargs_for_run.pop(
'config_download_args', None)
self.extra_env_variables = self._kwargs_for_run.pop(
'extra_env_variables', None)
self.queue_name = self._kwargs_for_run.pop('queue_name', None)
self.reproduce_command = self._kwargs_for_run.pop(
'reproduce_command', True)
self.execution_id = self._kwargs_for_run.pop('execution_id', None)
self._work_dir = self._kwargs_for_run.pop(
'work_dir', None)
self.max_message_size = self._kwargs_for_run.pop(
'max_message_size', 1048576)
self.gather_facts = self._kwargs_for_run.pop('gather_facts', False)
self.trash_output = self._kwargs_for_run.pop('trash_output', False)
self.profile_tasks = self._kwargs_for_run.pop('profile_tasks', True)
self.profile_tasks_limit = self._kwargs_for_run.pop(
'profile_tasks_limit', 20)
self.blacklisted_hostnames = self._kwargs_for_run.pop(
'blacklisted_hostnames', [])
self.override_ansible_cfg = self._kwargs_for_run.pop(
'override_ansible_cfg', None)
self.command_timeout = self._kwargs_for_run.pop(
'command_timeout', None)
self._remove_work_dir = False
@property
def work_dir(self):
if self._work_dir:
return self._work_dir
self._work_dir = tempfile.mkdtemp(prefix='ansible-mistral-action')
self._remove_work_dir = True
return self._work_dir
@property
def inventory(self):
if not self._inventory:
return None
# NOTE(flaper87): if it's a path, use it
if (isinstance(self._inventory, six.string_types) and
os.path.exists(self._inventory)):
return self._inventory
if not isinstance(self._inventory, six.string_types):
self._inventory = yaml.safe_dump(self._inventory)
path = os.path.join(self.work_dir, 'inventory.yaml')
# NOTE(flaper87):
# We could probably catch parse errors here
# but if we do, they won't be propagated and
# we should not move forward with the action
# if the inventory generation failed
with open(path, 'w') as inventory:
inventory.write(self._inventory)
self._inventory = path
return path
@property
def playbook(self):
if not self._playbook:
return None
# NOTE(flaper87): if it's a path, use it
if (isinstance(self._playbook, six.string_types) and
os.path.exists(self._playbook)):
return self._playbook
if not isinstance(self._playbook, six.string_types):
self._playbook = yaml.safe_dump(self._playbook)
path = os.path.join(self.work_dir, self.playbook_name)
# NOTE(flaper87):
# We could probably catch parse errors here
# but if we do, they won't be propagated and
# we should not move forward with the action
# if the inventory generation failed
with open(path, 'w') as playbook:
playbook.write(self._playbook)
self._playbook = path
return path
@property
def ssh_private_key(self):
if not self._ssh_private_key:
return None
# NOTE(flaper87): if it's a path, use it
if (isinstance(self._ssh_private_key, six.string_types) and
os.path.exists(self._ssh_private_key)):
os.chmod(self._ssh_private_key, 0o600)
return self._ssh_private_key
path = os.path.join(self.work_dir, 'ssh_private_key')
# NOTE(flaper87):
# We could probably catch parse errors here
# but if we do, they won't be propagated and
# we should not move forward with the action
# if the inventory generation failed
with open(path, 'w') as ssh_key:
ssh_key.write(self._ssh_private_key)
os.chmod(path, 0o600)
self._ssh_private_key = path
return path
def format_message(self, message):
type_ = 'tripleo.ansible-playbook.{}'.format(self.playbook_name)
return {
'body': {
'type': type_,
'payload': {
'message': message,
'plan_name': self.plan_name,
'status': 'RUNNING',
'execution': {'id': self.execution_id}}}}
def post_message(self, queue, message):
"""Posts message to queue
Breaks the message up by maximum message size if needed.
"""
start = 0
# We use 50% of the max message size to account for any overhead
# due to JSON encoding plus the wrapped dict structure from
# format_message.
message_size = int(self.max_message_size * 0.5)
while True:
end = start + message_size
message_part = message[start:end]
start = end
if not message_part:
return
queue.post(self.format_message(message_part))
def run(self, context):
python_version = sys.version_info.major
ansible_playbook_cmd = "ansible-playbook-{}".format(python_version)
if 1 < self.verbosity < 6:
verbosity_option = '-' + ('v' * (self.verbosity - 1))
command = [ansible_playbook_cmd, verbosity_option,
self.playbook]
else:
command = [ansible_playbook_cmd, self.playbook]
# --limit should always take precedence over blacklisted hosts.
# https://bugzilla.redhat.com/show_bug.cgi?id=1857298
if self.limit_hosts:
command.extend(['--limit', self.limit_hosts])
elif self.blacklisted_hostnames:
host_pattern = ':'.join(
['!%s' % h for h in self.blacklisted_hostnames if h])
command.extend(['--limit', host_pattern])
if self.module_path:
command.extend(['--module-path', self.module_path])
if self.become:
command.extend(['--become'])
if self.become_user:
command.extend(['--become-user', self.become_user])
if self.extra_vars:
command.extend(['--extra-vars', self.extra_vars])
if self.flush_cache:
command.extend(['--flush-cache'])
if self.forks:
command.extend(['--forks', self.forks])
if self.ssh_common_args:
command.extend(['--ssh-common-args', self.ssh_common_args])
if self.ssh_extra_args:
command.extend(['--ssh-extra-args', self.ssh_extra_args])
if self.timeout:
command.extend(['--timeout', self.timeout])
if self.inventory:
command.extend(['--inventory-file', self.inventory])
if self.tags:
command.extend(['--tags', self.tags])
if self.skip_tags:
command.extend(['--skip-tags', self.skip_tags])
if self.config_download_args:
command.extend(self.config_download_args)
if self.extra_env_variables:
if not isinstance(self.extra_env_variables, dict):
msg = "extra_env_variables must be a dict"
return actions.Result(error=msg)
for key, value in self.extra_env_variables.items():
self.extra_env_variables[key] = six.text_type(value)
if self.gather_facts:
command.extend(['--gather-facts', self.gather_facts])
try:
ansible_config_path = write_default_ansible_cfg(
self.work_dir,
self.remote_user,
ssh_private_key=self.ssh_private_key,
override_ansible_cfg=self.override_ansible_cfg)
env_variables = {
'HOME': self.work_dir,
'ANSIBLE_LOCAL_TEMP': self.work_dir,
'ANSIBLE_CONFIG': ansible_config_path,
}
if self.profile_tasks:
env_variables.update({
# the whitelist could be collected from multiple
# arguments if we find a use case for it
'ANSIBLE_CALLBACK_WHITELIST':
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
'ANSIBLE_STDOUT_CALLBACK': 'tripleo_dense',
'PROFILE_TASKS_TASK_OUTPUT_LIMIT':
six.text_type(self.profile_tasks_limit),
})
if self.extra_env_variables:
env_variables.update(self.extra_env_variables)
if self.use_openstack_credentials:
security_ctx = context.security
env_variables.update({
'OS_AUTH_URL': security_ctx.auth_uri,
'OS_USERNAME': security_ctx.user_name,
'OS_AUTH_TOKEN': security_ctx.auth_token,
'OS_PROJECT_NAME': security_ctx.project_name})
command = [str(c) for c in command]
if self.reproduce_command:
command_path = os.path.join(self.work_dir,
"ansible-playbook-command.sh")
with open(command_path, 'w') as f:
f.write('#!/bin/bash\n')
f.write('\n')
for var in env_variables:
f.write('%s="%s"\n' % (var, env_variables[var]))
f.write('\n')
f.write(' '.join(command))
f.write(' "$@"')
f.write('\n')
os.chmod(command_path, 0o750)
if self.command_timeout:
command = ['timeout', '-s', 'KILL',
str(self.command_timeout)] + command
if self.queue_name:
zaqar = self.get_messaging_client(context)
queue = zaqar.queue(self.queue_name)
# TODO(d0ugal): We don't have the log errors functionality
# that processutils has, do we need to replicate that somehow?
process = subprocess.Popen(command, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=False, bufsize=1,
cwd=self.work_dir,
env=env_variables,
universal_newlines=True)
start = time.time()
stdout = []
lines = []
for line in iter(process.stdout.readline, ''):
lines.append(line)
if not self.trash_output:
stdout.append(line)
if time.time() - start > 30:
self.post_message(queue, ''.join(lines))
lines = []
start = time.time()
self.post_message(queue, ''.join(lines))
process.stdout.close()
returncode = process.wait()
# TODO(d0ugal): This bit isn't ideal - as we redirect stderr to
# stdout we don't know the difference. To keep the return dict
# similar there is an empty stderr. We can use the return code
# to determine if there was an error.
return {"stdout": "".join(stdout), "returncode": returncode,
"stderr": ""}
LOG.info('Running ansible-playbook command: %s', command)
stderr, stdout = processutils.execute(
*command, cwd=self.work_dir,
env_variables=env_variables,
log_errors=processutils.LogErrors.ALL)
if self.trash_output:
stdout = ""
stderr = ""
return {"stderr": stderr, "stdout": stdout,
"log_path": os.path.join(self.work_dir, 'ansible.log')}
finally:
# NOTE(flaper87): clean the mess if debug is disabled.
try:
if not self.verbosity and self._remove_work_dir:
shutil.rmtree(self.work_dir)
except Exception as e:
msg = "An error happened while cleaning work directory: " + e
LOG.error(msg)
return actions.Result(error=msg)
return ansible_utils.write_default_ansible_cfg(
work_dir, remote_user,
ssh_private_key, transport,
base_ansible_cfg,
override_ansible_cfg)

@@ -37,7 +37,7 @@ from dateutil.parser import parse as dt_parse
from dateutil.tz import tzlocal
from oslo_concurrency import processutils
from oslo_log import log as logging
from tripleo_common.actions import ansible
from tripleo_common.utils import ansible
from tripleo_common.image.base import BaseImageManager
from tripleo_common.image.exception import ImageNotFoundException
from tripleo_common.image.exception import ImageRateLimitedException
@@ -658,7 +658,7 @@ class BaseImageUploader(object):
log_f = os.path.join('/var/log', log_name)
try:
LOG.info('Ansible action starting')
ansible.AnsiblePlaybookAction(
ansible.run_ansible_playbook(
playbook=playbook,
work_dir=work_dir,
verbosity=1,
@@ -668,7 +668,7 @@ class BaseImageUploader(object):
"stdout_callback=tripleo_dense\n"
"log_path=%s\n" % log_f
)
).run(None)
)
except processutils.ProcessExecutionError as e:
LOG.error(
'%s\n'

@@ -1344,8 +1344,8 @@ class TestSkopeoImageUploader(base.TestCase):
'SkopeoImageUploader._copy')
@mock.patch('tripleo_common.image.image_uploader.'
'BaseImageUploader._image_exists')
@mock.patch('tripleo_common.actions.'
'ansible.AnsiblePlaybookAction', autospec=True)
@mock.patch('tripleo_common.utils.'
'ansible.run_ansible_playbook', autospec=True)
def test_modify_upload_image(self, mock_ansible, mock_exists, mock_copy,
mock_inspect, mock_auth):
mock_exists.return_value = False
@@ -1432,8 +1432,8 @@ class TestSkopeoImageUploader(base.TestCase):
'SkopeoImageUploader._copy')
@mock.patch('tripleo_common.image.image_uploader.'
'BaseImageUploader._image_exists')
@mock.patch('tripleo_common.actions.'
'ansible.AnsiblePlaybookAction', autospec=True)
@mock.patch('tripleo_common.utils.'
'ansible.run_ansible_playbook', autospec=True)
def test_modify_image_failed(self, mock_ansible, mock_exists, mock_copy,
mock_inspect, mock_auth):
mock_exists.return_value = False
@@ -1445,7 +1445,7 @@ class TestSkopeoImageUploader(base.TestCase):
push_destination = 'localhost:8787'
error = processutils.ProcessExecutionError(
'', 'ouch', -1, 'ansible-playbook')
mock_ansible.return_value.run.side_effect = error
mock_ansible.side_effect = error
self.assertRaises(
ImageUploaderException,
@@ -1464,8 +1464,8 @@ class TestSkopeoImageUploader(base.TestCase):
'BaseImageUploader.authenticate')
@mock.patch('tripleo_common.image.image_uploader.'
'BaseImageUploader._inspect')
@mock.patch('tripleo_common.actions.'
'ansible.AnsiblePlaybookAction', autospec=True)
@mock.patch('tripleo_common.utils.'
'ansible.run_ansible_playbook', autospec=True)
def test_modify_image_existing(self, mock_ansible, mock_inspect,
mock_auth):
mock_inspect.return_value = {'Digest': 'a'}

@@ -15,26 +15,23 @@
import json
import os
import random
from six.moves import configparser
import shutil
import string
import sys
import tempfile
from unittest import mock
from oslo_concurrency import processutils
from tripleo_common.actions import ansible
from tripleo_common.utils import ansible
from tripleo_common.tests import base
class AnsiblePlaybookActionTest(base.TestCase):
class AnsiblePlaybookTest(base.TestCase):
def setUp(self):
super(AnsiblePlaybookActionTest, self).setUp()
super(AnsiblePlaybookTest, self).setUp()
self.playbook = "myplaybook"
self.limit_hosts = None
self.remote_user = 'fido'
self.become = True
@@ -43,33 +40,34 @@ class AnsiblePlaybookActionTest(base.TestCase):
self.verbosity = 2
self.ctx = mock.MagicMock()
self.max_message_size = 1024
self.work_dir = tempfile.mkdtemp('tripleo-ansible')
self.playbook = os.path.join(self.work_dir, "playbook.yaml")
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch('tempfile.mkdtemp')
@mock.patch("tripleo_common.utils.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_run(self, mock_execute, mock_write_cfg):
def test_run(self, mock_execute, mock_write_cfg, mock_work_dir):
mock_execute.return_value = ('', '')
action = ansible.AnsiblePlaybookAction(
mock_work_dir.return_value = self.work_dir
ansible_config_path = os.path.join(self.work_dir,
'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
ansible.run_ansible_playbook(
playbook=self.playbook, limit_hosts=self.limit_hosts,
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=self.verbosity, config_download_args=['--check',
'--diff'])
ansible_config_path = os.path.join(action.work_dir, 'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
verbosity=self.verbosity)
action.run(self.ctx)
mock_write_cfg.assert_called_once_with(action.work_dir,
mock_write_cfg.assert_called_once_with(self.work_dir,
self.remote_user,
ssh_private_key=None,
override_ansible_cfg=None)
pb = os.path.join(action.work_dir, 'playbook.yaml')
pb = os.path.join(self.work_dir, 'playbook.yaml')
env = {
'HOME': action.work_dir,
'ANSIBLE_LOCAL_TEMP': action.work_dir,
'HOME': self.work_dir,
'ANSIBLE_LOCAL_TEMP': self.work_dir,
'ANSIBLE_CONFIG': ansible_config_path,
'ANSIBLE_CALLBACK_WHITELIST':
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
@@ -82,36 +80,36 @@ class AnsiblePlaybookActionTest(base.TestCase):
ansible_playbook_cmd, '-v', pb, '--become',
'--become-user',
self.become_user, '--extra-vars', json.dumps(self.extra_vars),
'--check', '--diff', env_variables=env, cwd=action.work_dir,
env_variables=env, cwd=self.work_dir,
log_errors=processutils.LogErrors.ALL)
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch('tempfile.mkdtemp')
@mock.patch("tripleo_common.utils.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_run_with_limit(self, mock_execute, mock_write_cfg):
def test_run_with_limit(self, mock_execute, mock_write_cfg, mock_work_dir):
mock_execute.return_value = ('', '')
mock_work_dir.return_value = self.work_dir
ansible_config_path = os.path.join(self.work_dir,
'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
action = ansible.AnsiblePlaybookAction(
ansible.run_ansible_playbook(
playbook=self.playbook, limit_hosts=['compute35'],
blacklisted_hostnames=['compute21'],
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=self.verbosity, config_download_args=['--check',
'--diff'])
ansible_config_path = os.path.join(action.work_dir, 'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
verbosity=self.verbosity)
action.run(self.ctx)
mock_write_cfg.assert_called_once_with(action.work_dir,
mock_write_cfg.assert_called_once_with(self.work_dir,
self.remote_user,
ssh_private_key=None,
override_ansible_cfg=None)
pb = os.path.join(action.work_dir, 'playbook.yaml')
pb = os.path.join(self.work_dir, 'playbook.yaml')
env = {
'HOME': action.work_dir,
'ANSIBLE_LOCAL_TEMP': action.work_dir,
'HOME': self.work_dir,
'ANSIBLE_LOCAL_TEMP': self.work_dir,
'ANSIBLE_CONFIG': ansible_config_path,
'ANSIBLE_CALLBACK_WHITELIST':
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
@@ -124,150 +122,54 @@ class AnsiblePlaybookActionTest(base.TestCase):
ansible_playbook_cmd, '-v', pb, '--limit', "['compute35']",
'--become', '--become-user',
self.become_user, '--extra-vars', json.dumps(self.extra_vars),
'--check', '--diff', env_variables=env, cwd=action.work_dir,
env_variables=env, cwd=self.work_dir,
log_errors=processutils.LogErrors.ALL)
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_run_with_blacklist(self, mock_execute, mock_write_cfg):
mock_execute.return_value = ('', '')
action = ansible.AnsiblePlaybookAction(
playbook=self.playbook, limit_hosts=None,
blacklisted_hostnames=['compute21'],
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=self.verbosity, config_download_args=['--check',
'--diff'])
ansible_config_path = os.path.join(action.work_dir, 'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
action.run(self.ctx)
mock_write_cfg.assert_called_once_with(action.work_dir,
self.remote_user,
ssh_private_key=None,
override_ansible_cfg=None)
pb = os.path.join(action.work_dir, 'playbook.yaml')
env = {
'HOME': action.work_dir,
'ANSIBLE_LOCAL_TEMP': action.work_dir,
'ANSIBLE_CONFIG': ansible_config_path,
'ANSIBLE_CALLBACK_WHITELIST':
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
'ANSIBLE_STDOUT_CALLBACK': 'tripleo_dense',
'PROFILE_TASKS_TASK_OUTPUT_LIMIT': '20',
}
python_version = sys.version_info.major
ansible_playbook_cmd = 'ansible-playbook-{}'.format(python_version)
mock_execute.assert_called_once_with(
ansible_playbook_cmd, '-v', pb, '--limit', '!compute21',
'--become', '--become-user', self.become_user, '--extra-vars',
json.dumps(self.extra_vars), '--check', '--diff',
env_variables=env, cwd=action.work_dir,
log_errors=processutils.LogErrors.ALL)
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_post_message(self, mock_execute, mock_write_cfg):
action = ansible.AnsiblePlaybookAction(
playbook=self.playbook, limit_hosts=self.limit_hosts,
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=self.verbosity,
max_message_size=self.max_message_size)
ansible_config_path = os.path.join(action.work_dir, 'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
message_size = int(self.max_message_size * 0.5)
# Message equal to max_message_size
queue = mock.Mock()
message = ''.join([string.ascii_letters[int(random.random() * 26)]
for x in range(1024)])
action.post_message(queue, message)
self.assertEqual(queue.post.call_count, 2)
self.assertEqual(
queue.post.call_args_list[0],
mock.call(action.format_message(message[:message_size])))
self.assertEqual(
queue.post.call_args_list[1],
mock.call(action.format_message(message[message_size:])))
# Message less than max_message_size
queue = mock.Mock()
message = ''.join([string.ascii_letters[int(random.random() * 26)]
for x in range(512)])
action.post_message(queue, message)
self.assertEqual(queue.post.call_count, 1)
self.assertEqual(
queue.post.call_args_list[0],
mock.call(action.format_message(message)))
# Message double max_message_size
queue = mock.Mock()
message = ''.join([string.ascii_letters[int(random.random() * 26)]
for x in range(2048)])
action.post_message(queue, message)
self.assertEqual(queue.post.call_count, 4)
self.assertEqual(
queue.post.call_args_list[0],
mock.call(action.format_message(message[:message_size])))
self.assertEqual(
queue.post.call_args_list[1],
mock.call(action.format_message(
message[message_size:message_size * 2])))
self.assertEqual(
queue.post.call_args_list[2],
mock.call(action.format_message(
message[message_size * 2:message_size * 3])))
self.assertEqual(
queue.post.call_args_list[3],
mock.call(action.format_message(
message[message_size * 3:2048])))
@mock.patch('tempfile.mkdtemp')
@mock.patch("shutil.rmtree")
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch("tripleo_common.utils.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_work_dir_cleanup(self, mock_execute, mock_write_cfg, mock_rmtree):
def test_work_dir_cleanup(self, mock_execute, mock_write_cfg,
mock_rmtree, mock_work_dir):
mock_execute.return_value = ('', '')
action = ansible.AnsiblePlaybookAction(
playbook=self.playbook, limit_hosts=self.limit_hosts,
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=0)
mock_work_dir.return_value = self.work_dir
ansible_config_path = os.path.join(self.work_dir,
'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
try:
action.run(self.ctx)
mock_rmtree.assert_called_once_with(action.work_dir)
ansible.run_ansible_playbook(
playbook=self.playbook, limit_hosts=self.limit_hosts,
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=0)
mock_rmtree.assert_called_once_with(self.work_dir)
finally:
# Since we mocked the delete we need to manually cleanup.
shutil.rmtree(action.work_dir)
shutil.rmtree(self.work_dir)
@mock.patch("shutil.rmtree")
@mock.patch("tripleo_common.actions.ansible.write_default_ansible_cfg")
@mock.patch("tripleo_common.utils.ansible.write_default_ansible_cfg")
@mock.patch("oslo_concurrency.processutils.execute")
def test_work_dir_no_cleanup(self, mock_execute, mock_write_cfg,
mock_rmtree):
mock_execute.return_value = ('', '')
# Specify a work_dir, this should not be deleted automatically.
# Specify a self.work_dir, this should not be deleted automatically.
work_dir = tempfile.mkdtemp()
ansible_config_path = os.path.join(work_dir,
'ansible.cfg')
mock_write_cfg.return_value = ansible_config_path
try:
action = ansible.AnsiblePlaybookAction(
ansible.run_ansible_playbook(
playbook=self.playbook, limit_hosts=self.limit_hosts,
remote_user=self.remote_user, become=self.become,
become_user=self.become_user, extra_vars=self.extra_vars,
verbosity=self.verbosity, work_dir=work_dir)
action.run(self.ctx)
# verify the rmtree is not called
mock_rmtree.assert_not_called()
finally:
@@ -342,7 +244,7 @@ class CopyConfigFileTest(base.TestCase):
override_ansible_cfg = ""
resulting_ansible_config = ansible.write_default_ansible_cfg(
work_dir, None, None, None, base_ansible_cfg=ansible_cfg_path,
work_dir, None, None, base_ansible_cfg=ansible_cfg_path,
override_ansible_cfg=override_ansible_cfg)
ansible_cfg = configparser.ConfigParser()

@@ -0,0 +1,380 @@
# Copyright 2017 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import json
import logging
import multiprocessing
import os
import shutil
import six
from six.moves import configparser
from six.moves import cStringIO as StringIO
import sys
import tempfile
import yaml
from oslo_concurrency import processutils
from tripleo_common import constants
LOG = logging.getLogger(__name__)
def write_default_ansible_cfg(work_dir,
remote_user,
ssh_private_key=None,
transport=None,
base_ansible_cfg='/etc/ansible/ansible.cfg',
override_ansible_cfg=None):
ansible_config_path = os.path.join(work_dir, 'ansible.cfg')
shutil.copy(base_ansible_cfg, ansible_config_path)
modules_path = (
'/root/.ansible/plugins/modules:'
'/usr/share/ansible/tripleo-plugins/modules:'
'/usr/share/ansible/plugins/modules:'
'/usr/share/ansible-modules:'
'{}/library:'
'{}/library'.format(constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
lookups_path = (
'/root/.ansible/plugins/lookup:'
'/usr/share/ansible/tripleo-plugins/lookup:'
'/usr/share/ansible/plugins/lookup:'
'{}/lookup_plugins:'
'{}/lookup_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
callbacks_path = (
'~/.ansible/plugins/callback:'
'/usr/share/ansible/tripleo-plugins/callback:'
'/usr/share/ansible/plugins/callback:'
'{}/callback_plugins:'
'{}/callback_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
callbacks_whitelist = ','.join(['tripleo_dense', 'tripleo_profile_tasks',
'tripleo_states'])
action_plugins_path = (
'~/.ansible/plugins/action:'
'/usr/share/ansible/plugins/action:'
'/usr/share/ansible/tripleo-plugins/action:'
'{}/action_plugins:'
'{}/action_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
filter_plugins_path = (
'~/.ansible/plugins/filter:'
'/usr/share/ansible/plugins/filter:'
'/usr/share/ansible/tripleo-plugins/filter:'
'{}/filter_plugins:'
'{}/filter_plugins'.format(
constants.DEFAULT_VALIDATIONS_BASEDIR,
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR))
roles_path = ('%(work_dir)s/roles:'
'/root/.ansible/roles:'
'/usr/share/ansible/tripleo-roles:'
'/usr/share/ansible/roles:'
'/etc/ansible/roles:'
'%(ooo_val_path)s/roles:'
'%(work_dir)s' % {
'work_dir': work_dir,
'ooo_val_path':
constants.DEFAULT_VALIDATIONS_LEGACY_BASEDIR
})
config = configparser.ConfigParser()
config.read(ansible_config_path)
config.set('defaults', 'retry_files_enabled', 'False')
config.set('defaults', 'roles_path', roles_path)
config.set('defaults', 'library', modules_path)
config.set('defaults', 'callback_plugins', callbacks_path)
config.set('defaults', 'callback_whitelist', callbacks_whitelist)
config.set('defaults', 'stdout_callback', 'tripleo_dense')
config.set('defaults', 'action_plugins', action_plugins_path)
config.set('defaults', 'lookup_plugins', lookups_path)
config.set('defaults', 'filter_plugins', filter_plugins_path)
log_path = os.path.join(work_dir, 'ansible.log')
config.set('defaults', 'log_path', log_path)
if os.path.exists(log_path):
new_path = (log_path + '-' +
datetime.now().strftime("%Y-%m-%dT%H:%M:%S"))
os.rename(log_path, new_path)
config.set('defaults', 'forks', str(min(
multiprocessing.cpu_count() * 4, 100)))
config.set('defaults', 'timeout', '30')
config.set('defaults', 'gather_timeout', '30')
# Setup fact cache to improve playbook execution speed
config.set('defaults', 'gathering', 'smart')
config.set('defaults', 'fact_caching', 'jsonfile')
config.set('defaults', 'fact_caching_connection',
'~/.ansible/fact_cache')
# NOTE(mwhahaha): only gather the bare minimum facts because this has
# direct impact on how fast ansible can go.
config.set('defaults', 'gather_subset', '!all,min')
# NOTE(mwhahaha): this significantly affects performance per ansible#73654
config.set('defaults', 'inject_facts_as_vars', 'false')
# Set the pull interval to lower CPU overhead
config.set('defaults', 'internal_poll_interval', '0.01')
# Set the interpreter discovery to auto mode.
config.set('defaults', 'interpreter_python', 'auto')
# Expire facts in the fact cache after 7200s (2h)
config.set('defaults', 'fact_caching_timeout', '7200')
# mistral user has no home dir set, so no place to save a known hosts file
config.set('ssh_connection', 'ssh_args',
'-o UserKnownHostsFile=/dev/null '
'-o StrictHostKeyChecking=no '
'-o ControlMaster=auto '
'-o ControlPersist=30m '
'-o ServerAliveInterval=5 '
'-o ServerAliveCountMax=5 '
'-o PreferredAuthentications=publickey')
config.set('ssh_connection', 'control_path_dir',
os.path.join(work_dir, 'ansible-ssh'))
config.set('ssh_connection', 'retries', '8')
config.set('ssh_connection', 'pipelining', 'True')
# Related to https://github.com/ansible/ansible/issues/22127
config.set('ssh_connection', 'scp_if_ssh', 'True')
if override_ansible_cfg:
sio_cfg = StringIO()
sio_cfg.write(override_ansible_cfg)
sio_cfg.seek(0)
config.read_file(sio_cfg)
sio_cfg.close()
with open(ansible_config_path, 'w') as configfile:
config.write(configfile)
return ansible_config_path
def _get_inventory(inventory, work_dir):
if not inventory:
return None
if (isinstance(inventory, six.string_types) and
os.path.exists(inventory)):
return inventory
if not isinstance(inventory, six.string_types):
inventory = yaml.safe_dump(inventory)
path = os.path.join(work_dir, 'inventory.yaml')
with open(path, 'w') as inv:
inv.write(inventory)
return path
def _get_ssh_private_key(ssh_private_key, work_dir):
if not ssh_private_key:
return None
if (isinstance(ssh_private_key, six.string_types) and
os.path.exists(ssh_private_key)):
os.chmod(ssh_private_key, 0o600)
return ssh_private_key
path = os.path.join(work_dir, 'ssh_private_key')
with open(path, 'w') as ssh_key:
ssh_key.write(ssh_private_key)
os.chmod(path, 0o600)
return path
def _get_playbook(playbook, work_dir):
if not playbook:
return None
if (isinstance(playbook, six.string_types) and
os.path.exists(playbook)):
return playbook
if not isinstance(playbook, six.string_types):
playbook = yaml.safe_dump(playbook)
path = os.path.join(work_dir, 'playbook.yaml')
with open(path, 'w') as pb:
pb.write(playbook)
return path
def run_ansible_playbook(playbook, work_dir=None, **kwargs):
verbosity = kwargs.get('verbosity', 5)
remove_work_dir = False
if not work_dir:
work_dir = tempfile.mkdtemp(prefix='tripleo-ansible')
remove_work_dir = True
playbook = _get_playbook(playbook, work_dir)
python_version = sys.version_info.major
ansible_playbook_cmd = "ansible-playbook-{}".format(python_version)
if 1 < verbosity < 6:
verbosity_option = '-' + ('v' * (verbosity - 1))
command = [ansible_playbook_cmd, verbosity_option,
playbook]
else:
command = [ansible_playbook_cmd, playbook]
limit_hosts = kwargs.get('limit_hosts', None)
if limit_hosts:
command.extend(['--limit', limit_hosts])
module_path = kwargs.get('module_path', None)
if module_path:
command.extend(['--module-path', module_path])
become = kwargs.get('become', False)
if become:
command.extend(['--become'])
become_user = kwargs.get('become_user', None)
if become_user:
command.extend(['--become-user', become_user])
extra_vars = kwargs.get('extra_vars', None)
if extra_vars:
extra_vars = json.dumps(extra_vars)
command.extend(['--extra-vars', extra_vars])
flush_cache = kwargs.get('flush_cache', False)
if flush_cache:
command.extend(['--flush-cache'])
forks = kwargs.get('forks', None)
if forks:
command.extend(['--forks', forks])
ssh_common_args = kwargs.get('ssh_common_args', None)
if ssh_common_args:
command.extend(['--ssh-common-args', ssh_common_args])
ssh_extra_args = kwargs.get('ssh_extra_args', None)
if ssh_extra_args:
command.extend(['--ssh-extra-args', ssh_extra_args])
timeout = kwargs.get('timeout', None)
if timeout:
command.extend(['--timeout', timeout])
inventory = _get_inventory(kwargs.get('inventory', None),
work_dir)
if inventory:
command.extend(['--inventory-file', inventory])
tags = kwargs.get('tags', None)
if tags:
command.extend(['--tags', tags])
skip_tags = kwargs.get('skip_tags', None)
if skip_tags:
command.extend(['--skip-tags', skip_tags])
extra_env_variables = kwargs.get('extra_env_variables', None)
override_ansible_cfg = kwargs.get('override_ansible_cfg', None)
remote_user = kwargs.get('remote_user', None)
ssh_private_key = kwargs.get('ssh_private_key', None)
if extra_env_variables:
if not isinstance(extra_env_variables, dict):
msg = "extra_env_variables must be a dict"
raise RuntimeError(msg)
for key, value in extra_env_variables.items():
extra_env_variables[key] = six.text_type(value)
try:
ansible_config_path = write_default_ansible_cfg(
work_dir,
remote_user,
ssh_private_key=_get_ssh_private_key(
ssh_private_key, work_dir),
override_ansible_cfg=override_ansible_cfg)
env_variables = {
'HOME': work_dir,
'ANSIBLE_LOCAL_TEMP': work_dir,
'ANSIBLE_CONFIG': ansible_config_path,
}
profile_tasks = kwargs.get('profile_tasks', True)
if profile_tasks:
profile_tasks_limit = kwargs.get('profile_tasks_limit', 20)
env_variables.update({
# the whitelist could be collected from multiple
# arguments if we find a use case for it
'ANSIBLE_CALLBACK_WHITELIST':
'tripleo_dense,tripleo_profile_tasks,tripleo_states',
'ANSIBLE_STDOUT_CALLBACK': 'tripleo_dense',
'PROFILE_TASKS_TASK_OUTPUT_LIMIT':
six.text_type(profile_tasks_limit),
})
if extra_env_variables:
env_variables.update(extra_env_variables)
command = [str(c) for c in command]
reproduce_command = kwargs.get('reproduce_command', None)
command_timeout = kwargs.get('command_timeout', None)
trash_output = kwargs.get('trash_output', None)
if reproduce_command:
command_path = os.path.join(work_dir,
"ansible-playbook-command.sh")
with open(command_path, 'w') as f:
f.write('#!/bin/bash\n')
f.write('\n')
for var in env_variables:
f.write('%s="%s"\n' % (var, env_variables[var]))
f.write('\n')
f.write(' '.join(command))
f.write(' "$@"')
f.write('\n')
os.chmod(command_path, 0o750)
if command_timeout:
command = ['timeout', '-s', 'KILL',
str(command_timeout)] + command
LOG.info('Running ansible-playbook command: %s', command)
stderr, stdout = processutils.execute(
*command, cwd=work_dir,
env_variables=env_variables,
log_errors=processutils.LogErrors.ALL)
if trash_output:
stdout = ""
stderr = ""
return {"stderr": stderr, "stdout": stdout,
"log_path": os.path.join(work_dir, 'ansible.log')}
finally:
try:
if remove_work_dir:
shutil.rmtree(work_dir)
except Exception as e:
msg = "An error happened while cleaning work directory: " + e
raise RuntimeError(msg)