Merge "Remove mistral from the package_update_* workflows"

Zuul 2020-02-19 00:27:00 +00:00 committed by Gerrit Code Review
commit 9bb806ad74
5 changed files with 117 additions and 44 deletions

View File

@@ -58,7 +58,9 @@ class TestRunAnsiblePlaybook(TestCase):
     @mock.patch('os.path.exists', return_value=False)
     @mock.patch('tripleoclient.utils.run_command_and_log')
-    def test_no_playbook(self, mock_run, mock_exists):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_no_playbook(self, mock_dump_artifact, mock_run, mock_exists):
         self.assertRaises(
             RuntimeError,
             utils.run_ansible_playbook,
@@ -77,7 +79,10 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return(rc=1)
     )
-    def test_subprocess_error(self, mock_run, mock_mkdirs, mock_exists,
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_subprocess_error(self, mock_dump_artifact,
+                              mock_run, mock_mkdirs, mock_exists,
                               mock_mkstemp):
         with self.assertRaises(AnsibleRunnerException):
             utils.run_ansible_playbook(
@@ -94,8 +99,10 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return()
     )
-    def test_run_success_default(self, mock_run, mock_mkdirs, mock_exists,
-                                 mock_mkstemp):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_run_success_default(self, mock_dump_artifact, mock_run,
+                                 mock_mkdirs, mock_exists, mock_mkstemp):
         retcode, output = utils.run_ansible_playbook(
             playbook='existing.yaml',
             inventory='localhost,',
@@ -110,7 +117,10 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return()
     )
-    def test_run_success_ansible_cfg(self, mock_run, mock_mkdirs, mock_exists):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_run_success_ansible_cfg(self, mock_dump_artifact, mock_run,
+                                     mock_mkdirs, mock_exists):
         retcode, output = utils.run_ansible_playbook(
             playbook='existing.yaml',
             inventory='localhost,',
@@ -126,8 +136,11 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return()
     )
-    def test_run_success_connection_local(self, mock_run, mock_mkdirs,
-                                          mock_exists, mock_mkstemp):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_run_success_connection_local(self, mock_dump_artifact, mock_run,
+                                          mock_mkdirs, mock_exists,
+                                          mock_mkstemp):
         retcode, output = utils.run_ansible_playbook(
             playbook='existing.yaml',
             inventory='localhost,',
@@ -144,8 +157,11 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return()
     )
-    def test_run_success_gathering_policy(self, mock_run, mock_exists,
-                                          mock_mkstemp, mock_makedirs):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_run_success_gathering_policy(self, mock_dump_artifact, mock_run,
+                                          mock_exists, mock_mkstemp,
+                                          mock_makedirs):
         retcode, output = utils.run_ansible_playbook(
             playbook='existing.yaml',
             inventory='localhost,',
@@ -163,8 +179,10 @@ class TestRunAnsiblePlaybook(TestCase):
         'run',
         return_value=fakes.fake_ansible_runner_run_return()
     )
-    def test_run_success_extra_vars(self, mock_run, mock_exists, mock_mkstemp,
-                                    mock_makedirs):
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_run_success_extra_vars(self, mock_dump_artifact, mock_run,
+                                    mock_exists, mock_mkstemp, mock_makedirs):
         arglist = {
             'var_one': 'val_one',
         }
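Every test above gains the same stacked patch because utils.run_ansible_playbook() now reaches ansible_runner.utils.dump_artifact() (see the run_ansible_playbook hunk further down). Since the new decorator sits closest to each test method and stacked mock.patch decorators inject their mocks bottom-up, mock_dump_artifact becomes the first mock argument after self. A minimal, self-contained sketch of that ordering rule follows (illustrative test class; it assumes ansible-runner is importable so the patch target resolves):

from unittest import TestCase, mock


class TestPatchOrdering(TestCase):

    @mock.patch('os.path.exists', return_value=False)    # top decorator: injected last
    @mock.patch('ansible_runner.utils.dump_artifact',    # bottom decorator: injected first
                autospec=True,
                return_value='/foo/inventory.yaml')
    def test_argument_order(self, mock_dump_artifact, mock_exists):
        # The bottom-most patch's mock arrives immediately after self.
        self.assertEqual('/foo/inventory.yaml',
                         mock_dump_artifact.return_value)
        self.assertFalse(mock_exists('/any/path'))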

View File

@@ -912,7 +912,10 @@ class TestDeployUndercloud(TestPluginV1):
     @mock.patch('tripleoclient.utils.wait_for_stack_ready', return_value=True)
     @mock.patch('tripleoclient.v1.tripleo_deploy.Deploy.'
                 '_set_default_plan')
-    def test_take_action_standalone(self, mock_def_plan, mock_poll,
+    @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
+                return_value="/foo/inventory.yaml")
+    def test_take_action_standalone(self, mock_dump_artifact,
+                                    mock_def_plan, mock_poll,
                                     mock_environ, mock_geteuid, mock_puppet,
                                     mock_killheat, mock_launchheat,
                                     mock_download, mock_tht,
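The standalone deploy test needs the same patch because take_action eventually calls utils.run_ansible_playbook(), which now touches dump_artifact. autospec=True additionally makes the mock enforce the real function's signature, so a call that drifts from dump_artifact(obj, path, filename) fails loudly instead of passing silently. A small illustration of that behaviour (it assumes ansible-runner is installed and that dump_artifact takes the three arguments used elsewhere in this change):

from unittest import mock

import ansible_runner.utils


with mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                return_value='/foo/inventory.yaml'):
    # Matches the real signature, so the autospec'd mock accepts the call.
    path = ansible_runner.utils.dump_artifact('undercloud', '/tmp', 'hosts')
    assert path == '/foo/inventory.yaml'

    try:
        # An argument the real function never takes is rejected.
        ansible_runner.utils.dump_artifact(bogus_keyword=True)
    except TypeError as err:
        print('autospec rejected the call: {}'.format(err))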

View File

@@ -516,11 +516,18 @@ def run_ansible_playbook(playbook, inventory, workdir, playbook_dir=None,
         env['ANSIBLE_CONFIG'] = ansible_cfg
     elif 'ANSIBLE_CONFIG' not in env and ansible_cfg:
         env['ANSIBLE_CONFIG'] = ansible_cfg
+    inventory_file = None
+    if inventory and not os.path.exists(inventory):
+        inventory_file = ansible_runner.utils.dump_artifact(
+            inventory,
+            ansible_artifact_path,
+            'hosts'
+        )
     r_opts = {
         'private_data_dir': workdir,
         'project_dir': playbook_dir,
-        'inventory': inventory,
+        'inventory': (inventory_file if inventory_file else inventory),
         'envvars': _encode_envvars(env=env),
         'playbook': playbook,
         'verbosity': verbosity,
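The new branch only fires when the inventory argument is a string that is not an existing path: the string is written verbatim to a hosts file under the artifact directory, and that file path is what ansible-runner receives. A standalone sketch of the same logic (paths are hypothetical; it assumes, as the call above does, that dump_artifact(obj, path, filename) writes obj to path/filename and returns the resulting file path):

import os

import ansible_runner.utils

inventory = 'undercloud ansible_connection=local'
ansible_artifact_path = '/tmp/tripleo-artifacts'
os.makedirs(ansible_artifact_path, exist_ok=True)

inventory_file = None
if inventory and not os.path.exists(inventory):
    # Not a file on disk, so persist the inline inventory as a hosts file
    # that ansible-runner can consume.
    inventory_file = ansible_runner.utils.dump_artifact(
        inventory,
        ansible_artifact_path,
        'hosts'
    )

r_opts = {'inventory': inventory_file if inventory_file else inventory}
print(r_opts['inventory'])   # path to the dumped hosts file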

View File

@@ -91,7 +91,7 @@ def _check_diskspace(upgrade=False):
     with utils.TempDirs() as tmp:
         rc, _ = utils.run_ansible_playbook(
             workdir=tmp,
-            inventory='undercloud,',
+            inventory='undercloud',
             connection='local',
             output_callback='validation_output',
             playbook_dir=constants.ANSIBLE_VALIDATION_DIR,
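The trailing comma can be dropped because of the utils.py change above: 'undercloud,' was only meaningful as Ansible's inline host-list syntax, but run_ansible_playbook() now writes a non-path inventory string verbatim into a hosts file, where a bare host name is what is wanted. A tiny illustration of the distinction the old form relied on (illustrative only):

def is_inline_host_list(inventory):
    # Ansible interprets an inventory string containing a comma as an
    # inline host list rather than as a path to an inventory file.
    return ',' in inventory


assert is_inline_host_list('undercloud,')      # old style: inline host list
assert not is_inline_host_list('undercloud')   # new style: plain host name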

View File

@@ -15,7 +15,8 @@ import pprint
 import time
 from heatclient.common import event_utils
 from openstackclient import shell
+from tripleo_common.actions import container_images
+from tripleo_common.actions import package_update
 from tripleoclient import exceptions
 from tripleoclient import utils
@@ -25,43 +26,87 @@ from tripleoclient.workflows import base
 _WORKFLOW_TIMEOUT = 120 * 60  # 2h
-def update(clients, **workflow_input):
-    workflow_client = clients.workflow_engine
-    tripleoclients = clients.tripleoclient
-    plan_name = workflow_input['container']
+def update(clients, container):
+    """Update the heat stack outputs for purposes of update/upgrade.
-    with tripleoclients.messaging_websocket() as ws:
-        execution = base.start_workflow(
-            workflow_client,
-            'tripleo.package_update.v1.package_update_plan',
-            workflow_input=workflow_input
+    This workflow assumes that previously the
+    plan_management.update_deployment_plan workflow has already been
+    run to process the templates and environments (the same way as
+    'deploy' command processes them).
+    :param clients: Application client object.
+    :type clients: Object
+    :param container: Container name to pull from.
+    :type container: String.
+    """
+    def _check_response(response):
+        """This test checks if a response is mistral based.
+        Some responses are constructed using the mistral Result class, but
+        because the returns from methods within tripleo-common are not
+        type safe, this static method will check for success using the
+        mistral attribute, but if it does not exist the raw response will
+        be returned.
+        :param response: Object
+        :Type response: Object
+        :returns: Boolean || Object
+        """
+        try:
+            return response.is_success()
+        except AttributeError:
+            return response
+    context = clients.tripleoclient.create_mistral_context()
+    container_action = container_images.PrepareContainerImageParameters(
+        container=container
+    )
+    success = _check_response(container_action.run(context=context))
+    if success is False:
+        raise RuntimeError(
+            'Prepare container image parameters failed: {}'.format(
+                success.to_dict()
+            )
+        )
-        for payload in base.wait_for_messages(workflow_client, ws, execution,
-                                              _WORKFLOW_TIMEOUT):
-            status = payload.get('status', 'RUNNING')
-            message = payload.get('message')
-            if message and status == "RUNNING":
-                print(message)
+    update_action = package_update.UpdateStackAction(
+        timeout=240,
+        container=container
+    )
+    success = _check_response(update_action.run(context=context))
+    if success is False:
+        raise RuntimeError(
+            'Upgrade failed: {}'.format(
+                success.to_dict()
+            )
+        )
-        if payload['status'] == "FAILED":
-            raise RuntimeError('Upgrade failed with: {}'
-                               ''.format(payload['message']))
-    orchestration_client = clients.orchestration
-    events = event_utils.get_events(orchestration_client,
-                                    stack_id=plan_name,
-                                    event_args={'sort_dir': 'desc',
-                                                'limit': 1})
+    events = event_utils.get_events(
+        clients.orchestration,
+        stack_id=container,
+        event_args={
+            'sort_dir': 'desc',
+            'limit': 1
+        }
+    )
     marker = events[0].id if events else None
     time.sleep(10)
     create_result = utils.wait_for_stack_ready(
-        orchestration_client, plan_name, marker, 'UPDATE', 1)
+        clients.orchestration,
+        container,
+        marker,
+        'UPDATE',
+        1
+    )
     if not create_result:
-        shell.OpenStackShell().run(["stack", "failures", "list", plan_name])
-        raise exceptions.DeploymentError("Heat Stack update failed.")
+        raise exceptions.DeploymentError(
+            'Heat Stack update failed, run the following command'
+            ' `openstack --os-cloud undercloud stack failures list {}`'
+            ' to investigate these failures further.'.format(container)
+        )
 def run_on_nodes(clients, **workflow_input):
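Taken together, the old flow pushed workflow_input through the tripleo.package_update.v1.package_update_plan Mistral workflow and polled a websocket for status messages, while the new flow instantiates the tripleo-common actions and runs them in-process against a Mistral context. A condensed sketch of that calling pattern (the action names, create_mistral_context() and the response check mirror the diff above; the clients object and stack name are placeholders):

from tripleo_common.actions import package_update


def _run_action(action, context):
    # Same idea as _check_response() above: Mistral-style Result objects
    # expose is_success(); anything else is treated as a plain return value.
    result = action.run(context=context)
    try:
        success = result.is_success()
    except AttributeError:
        return result
    if not success:
        raise RuntimeError('Action failed: {}'.format(result))
    return result


def update_stack(clients, stack_name='overcloud'):
    # clients is the tripleoclient application clients object used above.
    context = clients.tripleoclient.create_mistral_context()
    _run_action(
        package_update.UpdateStackAction(timeout=240, container=stack_name),
        context
    )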