2016-08-24 10:48:00 +00:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
|
|
# not use this file except in compliance with the License. You may obtain
|
|
|
|
# a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
|
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
|
|
# License for the specific language governing permissions and limitations
|
|
|
|
# under the License.
|
|
|
|
from __future__ import print_function
|
|
|
|
|
2018-10-12 14:21:43 +00:00
|
|
|
import copy
|
2017-10-24 13:29:57 +00:00
|
|
|
import os
|
2016-08-24 10:48:00 +00:00
|
|
|
import pprint
|
2018-05-03 16:57:52 +00:00
|
|
|
import shutil
|
|
|
|
import socket
|
2017-10-24 13:29:57 +00:00
|
|
|
import subprocess
|
2018-05-03 16:57:52 +00:00
|
|
|
import tempfile
|
2016-08-25 07:47:16 +00:00
|
|
|
import time
|
|
|
|
|
|
|
|
from heatclient.common import event_utils
|
2017-02-17 15:31:03 +00:00
|
|
|
from openstackclient import shell
|
2016-08-25 07:47:16 +00:00
|
|
|
|
2018-05-03 16:57:52 +00:00
|
|
|
from tripleoclient import constants
|
2016-08-25 07:47:16 +00:00
|
|
|
from tripleoclient import exceptions
|
|
|
|
from tripleoclient import utils
|
2016-08-24 10:48:00 +00:00
|
|
|
|
2016-07-12 06:36:15 +00:00
|
|
|
from tripleoclient.workflows import base
|
|
|
|
|
2016-08-24 10:48:00 +00:00
|
|
|
|
2018-04-25 19:00:21 +00:00
|
|
|
# Default timeout (seconds) used when waiting for Mistral workflow messages.
_WORKFLOW_TIMEOUT = 360  # 6 * 60 seconds
|
|
|
|
|
|
|
|
|
2018-05-15 21:47:37 +00:00
|
|
|
def deploy(log, clients, **workflow_input):
    """Start the deploy_plan workflow and stream its progress messages.

    :param log: logging.Logger used to record the final payload on failure.
    :param clients: client manager providing workflow_engine/tripleoclient.
    :param workflow_input: forwarded verbatim to the Mistral workflow.
    :raises ValueError: if the last workflow message is not SUCCESS.
    """
    wf_name = 'tripleo.deployment.v1.deploy_plan'
    mistral = clients.workflow_engine

    with clients.tripleoclient.messaging_websocket() as ws:
        execution = base.start_workflow(
            mistral, wf_name, workflow_input=workflow_input)

        # The deploy workflow ends once the Heat create/update starts, so
        # it shouldn't take very long.  Allow up to 10 minutes for
        # messages from the workflow.
        for payload in base.wait_for_messages(mistral, ws, execution, 600):
            progress = payload.get('message')
            if progress and payload.get('status', 'RUNNING') == "RUNNING":
                print(progress)

    # ``payload`` holds the last message received; anything other than
    # SUCCESS means the workflow ended abnormally.
    if payload['status'] != "SUCCESS":
        log.info(pprint.pformat(payload))
        print(payload['message'])
        raise ValueError("Unexpected status %s for %s"
                         % (payload['status'], wf_name))
|
2016-08-25 07:47:16 +00:00
|
|
|
|
|
|
|
|
2016-09-14 14:55:20 +00:00
|
|
|
def deploy_and_wait(log, clients, stack, plan_name, verbose_level,
                    timeout=None, run_validations=False,
                    skip_deploy_identifier=False, deployment_options=None):
    """Start the deploy and wait for it to finish.

    :param log: logging.Logger for progress reporting.
    :param clients: client manager with orchestration and workflow clients.
    :param stack: existing Heat stack object, or None to create a new one.
    :param plan_name: name of the deployment plan (and Heat stack).
    :param verbose_level: verbosity; >= 1 enables per-event output.
    :param timeout: optional deploy timeout forwarded to the workflow.
    :param run_validations: whether the workflow should run validations.
    :param skip_deploy_identifier: whether to skip the DeployIdentifier.
    :param deployment_options: optional dict of extra deployment options.
        Defaults to None (treated as an empty dict) instead of a mutable
        ``{}`` default, which would be shared and mutable across calls.
    :raises exceptions.DeploymentError: when the Heat create/update fails.
    """
    if deployment_options is None:
        deployment_options = {}

    workflow_input = {
        "container": plan_name,
        "run_validations": run_validations,
        "skip_deploy_identifier": skip_deploy_identifier,
        "deployment_options": deployment_options,
    }

    if timeout is not None:
        workflow_input['timeout'] = timeout

    deploy(log, clients, **workflow_input)

    orchestration_client = clients.orchestration

    if stack is None:
        log.info("Performing Heat stack create")
        action = 'CREATE'
        marker = None
    else:
        log.info("Performing Heat stack update")
        # Make sure existing parameters for stack are reused
        # Find the last top-level event to use for the first marker
        events = event_utils.get_events(orchestration_client,
                                        stack_id=plan_name,
                                        event_args={'sort_dir': 'desc',
                                                    'limit': 1})
        marker = events[0].id if events else None
        action = 'UPDATE'

    # Give the Heat stack operation a moment to register before polling.
    time.sleep(10)
    verbose_events = verbose_level >= 1
    create_result = utils.wait_for_stack_ready(
        orchestration_client, plan_name, marker, action, verbose_events)
    if not create_result:
        # Surface the stack failures and record the failed deployment
        # status before raising.
        shell.OpenStackShell().run(["stack", "failures", "list", plan_name])
        set_deployment_status(clients, 'failed', plan=plan_name)
        if stack is None:
            raise exceptions.DeploymentError("Heat Stack create failed.")
        else:
            raise exceptions.DeploymentError("Heat Stack update failed.")
|
2016-11-17 12:23:54 +00:00
|
|
|
|
|
|
|
|
2018-02-16 14:45:56 +00:00
|
|
|
def create_overcloudrc(clients, **workflow_input):
    """Run the create_overcloudrc workflow and return its message payload.

    :raises exceptions.WorkflowServiceError: when the workflow does not
        report SUCCESS.
    """
    mistral = clients.workflow_engine
    websocket_client = clients.tripleoclient

    with websocket_client.messaging_websocket() as ws:
        execution = base.start_workflow(
            mistral,
            'tripleo.deployment.v1.create_overcloudrc',
            workflow_input=workflow_input)

        for payload in base.wait_for_messages(mistral, ws, execution):
            # The workflow returns the overcloudrc data, an error message
            # or blank.
            if payload.get('status') != 'SUCCESS':
                raise exceptions.WorkflowServiceError(
                    'Exception creating overcloudrc: {}'.format(
                        payload.get('message')))
            return payload.get('message')
|
2017-10-24 13:29:57 +00:00
|
|
|
|
|
|
|
|
2019-06-11 13:19:20 +00:00
|
|
|
def create_cloudsyaml(clients, **workflow_input):
    """Run the workflow that renders the overcloud clouds.yaml content.

    :raises exceptions.WorkflowServiceError: when the workflow does not
        report SUCCESS.
    """
    mistral = clients.workflow_engine
    websocket_client = clients.tripleoclient

    with websocket_client.messaging_websocket() as ws:
        execution = base.start_workflow(
            mistral,
            'tripleo.deployment.v1.createcloudsyaml',
            workflow_input=workflow_input)

        for payload in base.wait_for_messages(mistral, ws, execution):
            # The workflow returns the overcloud cloud yaml data, an
            # error message or blank.
            if payload.get('status') != 'SUCCESS':
                raise exceptions.WorkflowServiceError(
                    'Exception creating overcloud clouds.yaml file: {}'.format(
                        payload.get('message')))
            return payload.get('message')
|
|
|
|
|
|
|
|
|
2018-05-16 18:29:59 +00:00
|
|
|
def get_overcloud_hosts(stack, ssh_network):
    """Return the ssh_network IPs of all non-blacklisted overcloud nodes."""
    role_net_ip_map = utils.get_role_net_ip_map(stack)
    blacklisted_ips = utils.get_blacklisted_ip_addresses(stack)

    hosts = []
    for net_ip_map in role_net_ip_map.values():
        # Work on copies of both lists: the blacklist only contains
        # ctlplane addresses, so the ctlplane list is needed to decide
        # which ssh_network address to drop.
        ssh_ips = copy.copy(net_ip_map.get(ssh_network, []))
        ctlplane_ips = copy.copy(net_ip_map.get('ctlplane', []))

        # Remove the ssh_network address that sits at the same index as
        # each blacklisted ctlplane address.
        for blacklisted in [ip for ip in ctlplane_ips
                            if ip in blacklisted_ips]:
            position = ctlplane_ips.index(blacklisted)
            ctlplane_ips.pop(position)
            ssh_ips.pop(position)

        hosts.extend(ssh_ips)

    return hosts
|
2017-11-29 13:41:18 +00:00
|
|
|
|
|
|
|
|
2018-05-03 16:57:52 +00:00
|
|
|
def wait_for_ssh_port(host):
    """Block until TCP port 22 on ``host`` accepts connections.

    Tries IPv4 first, then IPv6, retrying once per second until
    ``constants.ENABLE_SSH_ADMIN_SSH_PORT_TIMEOUT`` seconds have elapsed.

    :param host: hostname or IP address to probe.
    :raises exceptions.DeploymentError: if port 22 never opens in time.
    """
    start = int(time.time())
    while True:
        now = int(time.time())
        if (now - start) > constants.ENABLE_SSH_ADMIN_SSH_PORT_TIMEOUT:
            raise exceptions.DeploymentError(
                "Timed out waiting for port 22 from %s" % host)
        # First check ipv4 then check ipv6.  The socket is closed in all
        # cases -- including failed connects -- so repeated retries do not
        # leak file descriptors (the previous version only closed the
        # socket on a successful connect).
        for family in (socket.AF_INET, socket.AF_INET6):
            sock = socket.socket(family)
            try:
                sock.connect((host, 22))
                return
            except socket.error:
                pass
            finally:
                sock.close()
        time.sleep(1)
|
|
|
|
|
|
|
|
|
2018-11-01 12:20:51 +00:00
|
|
|
def get_hosts_and_enable_ssh_admin(log, clients, stack, overcloud_ssh_network,
                                   overcloud_ssh_user, overcloud_ssh_key):
    """Look up the overcloud hosts and enable ssh admin access on them.

    :param log: logging.Logger for error reporting.
    :param clients: client manager forwarded to enable_ssh_admin.
    :param stack: Heat stack object for the deployed plan.
    :param overcloud_ssh_network: network name to pull host IPs from.
    :param overcloud_ssh_user: user for the initial ssh connections.
    :param overcloud_ssh_key: path to the key for the initial connections.
    :raises exceptions.DeploymentError: when no usable host addresses are
        found on the requested network.
    """
    hosts = get_overcloud_hosts(stack, overcloud_ssh_network)

    if [host for host in hosts if host]:

        try:
            enable_ssh_admin(log, clients, stack.stack_name, hosts,
                             overcloud_ssh_user, overcloud_ssh_key)
        except subprocess.CalledProcessError as e:
            # ssh exits with 255 on connection/authentication problems.
            if e.returncode == 255:
                log.error("Could not import keys to one of {}. "
                          "Check if the user/ip are correct.\n".format(hosts))
            else:
                # NOTE: the previous code did
                # "...{}".format(hosts, e.message), which raised
                # AttributeError on Python 3 (CalledProcessError has no
                # ``message``) and logged the wrong value; format the
                # exception itself instead.
                log.error("Unknown error. "
                          "Original message is:\n{}".format(e))

    else:
        raise exceptions.DeploymentError("Cannot find any hosts on '{}'"
                                         " in network '{}'"
                                         .format(stack.stack_name,
                                                 overcloud_ssh_network))
|
2018-11-01 12:20:51 +00:00
|
|
|
|
|
|
|
|
2018-10-01 14:37:09 +00:00
|
|
|
def enable_ssh_admin(log, clients, plan_name, hosts, ssh_user, ssh_key):
    """Enable the tripleo-admin user on the overcloud hosts over ssh.

    A short-lived RSA keypair is generated locally, its public half is
    appended to each host's authorized_keys via the operator-supplied
    ``ssh_user``/``ssh_key`` credentials, and its private half is handed to
    the ``tripleo.access.v1.enable_ssh_admin`` Mistral workflow.  The
    temporary key is removed from the hosts afterwards and the local copy
    is always deleted, even on failure.

    :param log: logging.Logger for keygen success/failure reporting.
    :param clients: client manager providing workflow_engine.
    :param plan_name: name of the deployment plan.
    :param hosts: list of host addresses to enable ssh admin on.
    :param ssh_user: user for the initial ssh connections.
    :param ssh_key: path to the private key for the initial connections.
    :raises exceptions.DeploymentError: when the workflow times out or
        ends in FAILED/ERROR state.
    """
    print("Enabling ssh admin (tripleo-admin) for hosts:")
    print(" ".join(hosts))
    print("Using ssh user %s for initial connection." % ssh_user)
    print("Using ssh key at %s for initial connection." % ssh_key)

    # Non-interactive ssh: fail fast instead of prompting, and don't
    # pollute (or trust) the operator's known_hosts file.
    ssh_options = ("-o ConnectionAttempts=6 "
                   "-o ConnectTimeout=30 "
                   "-o StrictHostKeyChecking=no "
                   "-o PasswordAuthentication=no "
                   "-o UserKnownHostsFile=/dev/null")
    tmp_key_dir = tempfile.mkdtemp()
    tmp_key_private = os.path.join(tmp_key_dir, 'id_rsa')
    tmp_key_public = os.path.join(tmp_key_dir, 'id_rsa.pub')
    # The comment doubles as the match pattern used later to delete the
    # key from each host's authorized_keys.
    tmp_key_comment = "TripleO split stack short term key"

    try:
        tmp_key_command = ["ssh-keygen", "-N", "", "-t", "rsa", "-b", "4096",
                           "-f", tmp_key_private, "-C", tmp_key_comment]
        DEVNULL = open(os.devnull, 'w')
        try:
            subprocess.check_call(tmp_key_command, stdout=DEVNULL,
                                  stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as exc:
            # NOTE(review): a keygen failure is only logged here; the
            # subsequent open() of the key files will then raise.
            log.error("ssh-keygen has failed with return code {0}".
                      format(exc.returncode))
        else:
            log.info("ssh-keygen has been run successfully")
        DEVNULL.close()

        with open(tmp_key_public) as pubkey:
            tmp_key_public_contents = pubkey.read()
        with open(tmp_key_private) as privkey:
            tmp_key_private_contents = privkey.read()

        # Push the temporary public key onto every host so the workflow
        # can connect with the matching private key.
        for host in hosts:
            wait_for_ssh_port(host)
            copy_tmp_key_command = ["ssh"] + ssh_options.split()
            copy_tmp_key_command += \
                ["-o", "StrictHostKeyChecking=no",
                 "-i", ssh_key, "-l", ssh_user, host,
                 "echo -e '\n%s' >> $HOME/.ssh/authorized_keys" %
                 tmp_key_public_contents]
            print("Inserting TripleO short term key for %s" % host)
            subprocess.check_call(copy_tmp_key_command,
                                  stderr=subprocess.STDOUT)

        print("Starting ssh admin enablement workflow")

        workflow_client = clients.workflow_engine

        workflow_input = {
            "ssh_user": ssh_user,
            "ssh_servers": hosts,
            "ssh_private_key": tmp_key_private_contents,
            "plan_name": plan_name
        }

        execution = base.start_workflow(
            workflow_client,
            'tripleo.access.v1.enable_ssh_admin',
            workflow_input=workflow_input
        )

        # Poll the execution state once per second until it finishes,
        # times out, or fails; print a status line periodically.
        start = int(time.time())
        while True:
            now = int(time.time())
            if (now - start) > constants.ENABLE_SSH_ADMIN_TIMEOUT:
                raise exceptions.DeploymentError(
                    "ssh admin enablement workflow - TIMED OUT.")

            time.sleep(1)
            execution = workflow_client.executions.get(execution.id)
            state = execution.state

            if state == 'RUNNING':
                if (now - start) % constants.ENABLE_SSH_ADMIN_STATUS_INTERVAL\
                        == 0:
                    print("ssh admin enablement workflow - RUNNING.")
                continue
            elif state == 'SUCCESS':
                print("ssh admin enablement workflow - COMPLETE.")
                break
            elif state in ('FAILED', 'ERROR'):
                error = "ssh admin enablement workflow - FAILED.\n"
                error += execution.to_dict()['state_info']
                raise exceptions.DeploymentError(error)

        # Clean the temporary key back out of every host's
        # authorized_keys, matching on the key comment.
        for host in hosts:
            rm_tmp_key_command = ["ssh"] + ssh_options.split()
            rm_tmp_key_command += \
                ["-i", ssh_key, "-l", ssh_user, host,
                 "sed -i -e '/%s/d' $HOME/.ssh/authorized_keys" %
                 tmp_key_comment]
            print("Removing TripleO short term key from %s" % host)
            subprocess.check_call(rm_tmp_key_command, stderr=subprocess.STDOUT)
    finally:
        # Always delete the local copy of the short-term keypair.
        print("Removing short term keys locally")
        shutil.rmtree(tmp_key_dir)

    print("Enabling ssh admin - COMPLETE.")
|
|
|
|
|
|
|
|
|
|
|
|
def config_download(log, clients, stack, templates,
                    ssh_user, ssh_key, ssh_network,
                    output_dir, override_ansible_cfg, timeout, verbosity=1,
                    deployment_options=None,
                    in_flight_validations=True):
    """Run the config_download_deploy workflow and stream its messages.

    :param log: logging.Logger (kept for interface compatibility; the
        visible body does not use it).
    :param clients: client manager providing workflow_engine/tripleoclient.
    :param stack: Heat stack object; its stack_name selects the plan.
    :param templates: template directory (kept for interface
        compatibility; the visible body does not use it).
    :param ssh_user: ssh user (kept for interface compatibility).
    :param ssh_key: ssh key path (kept for interface compatibility).
    :param ssh_network: network whose addresses ansible should use.
    :param output_dir: optional working directory for config-download.
    :param override_ansible_cfg: optional path to an ansible.cfg whose
        contents are forwarded to the workflow.
    :param timeout: config-download timeout passed to the workflow.
    :param verbosity: ansible verbosity level.
    :param deployment_options: optional dict of deployment options.
        Defaults to None (treated as an empty dict) instead of a mutable
        ``{}`` default, which would be shared and mutable across calls.
    :param in_flight_validations: when False, skip opendev-validation tags.
    :raises exceptions.DeploymentError: if the workflow does not SUCCEED.
    """
    if deployment_options is None:
        deployment_options = {}

    workflow_client = clients.workflow_engine
    tripleoclients = clients.tripleoclient

    if in_flight_validations:
        skip_tags = ''
    else:
        skip_tags = 'opendev-validation'

    workflow_input = {
        'verbosity': verbosity,
        'plan_name': stack.stack_name,
        'ssh_network': ssh_network,
        'config_download_timeout': timeout,
        'deployment_options': deployment_options,
        'skip_tags': skip_tags
    }
    if output_dir:
        workflow_input.update(dict(work_dir=output_dir))
    if override_ansible_cfg:
        # Send the file contents, not the path: the workflow runs on the
        # undercloud where the local path may not exist.
        with open(override_ansible_cfg) as cfg:
            override_ansible_cfg_contents = cfg.read()
        workflow_input.update(
            dict(override_ansible_cfg=override_ansible_cfg_contents))

    with tripleoclients.messaging_websocket() as ws:
        execution = base.start_workflow(
            workflow_client,
            'tripleo.deployment.v1.config_download_deploy',
            workflow_input=workflow_input
        )

        for payload in base.wait_for_messages(workflow_client, ws, execution):
            print(payload['message'])

    if payload['status'] == 'SUCCESS':
        print("Overcloud configuration completed.")
    else:
        raise exceptions.DeploymentError("Overcloud configuration failed.")
|
2017-11-06 16:49:10 +00:00
|
|
|
|
|
|
|
|
2018-07-25 22:15:56 +00:00
|
|
|
def config_download_export(clients, **workflow_input):
    """Export the config-download data and return a tempurl for it.

    :raises exceptions.WorkflowServiceError: when the export workflow does
        not finish successfully.
    """
    mistral = clients.workflow_engine
    websocket_client = clients.tripleoclient

    with websocket_client.messaging_websocket() as ws:
        execution = base.start_workflow(
            mistral,
            'tripleo.deployment.v1.config_download_export',
            workflow_input=workflow_input)

        for payload in base.wait_for_messages(mistral, ws, execution,
                                              _WORKFLOW_TIMEOUT):
            progress = payload.get('message')
            if progress:
                print(progress)

    if payload['status'] != 'SUCCESS':
        raise exceptions.WorkflowServiceError(
            'Exception exporting config-download: {}'.format(
                payload['message']))
    return payload['tempurl']
|
|
|
|
|
|
|
|
|
2017-11-06 16:49:10 +00:00
|
|
|
def get_horizon_url(clients, **workflow_input):
    """Return the Horizon URL reported by the get_horizon_url workflow.

    :param clients: client manager providing workflow_engine/tripleoclient.
    :raises exceptions.WorkflowServiceError: when a workflow message does
        not report SUCCESS.
    """
    workflow_client = clients.workflow_engine
    tripleoclients = clients.tripleoclient

    with tripleoclients.messaging_websocket() as ws:
        execution = base.start_workflow(
            workflow_client,
            'tripleo.deployment.v1.get_horizon_url',
            workflow_input=workflow_input
        )

        for payload in base.wait_for_messages(workflow_client, ws, execution,
                                              360):
            # Raise explicitly instead of using ``assert``: assertions are
            # stripped under ``python -O``, which would silently ignore a
            # failed workflow.
            if payload['status'] != "SUCCESS":
                raise exceptions.WorkflowServiceError(
                    'Exception getting horizon url: {}'.format(
                        payload.get('message', '')))

        return payload['horizon_url']
|
2018-04-25 19:00:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
def get_deployment_status(clients, **workflow_input):
    """Return the deployment_status reported by the status workflow.

    :raises exceptions.WorkflowServiceError: when the workflow does not
        report SUCCESS.
    """
    mistral = clients.workflow_engine
    websocket_client = clients.tripleoclient

    with websocket_client.messaging_websocket() as ws:
        execution = base.start_workflow(
            mistral,
            'tripleo.deployment.v1.get_deployment_status',
            workflow_input=workflow_input)

        for payload in base.wait_for_messages(mistral, ws, execution,
                                              _WORKFLOW_TIMEOUT):
            progress = payload.get('message')
            if progress:
                print(progress)

    if payload['status'] != 'SUCCESS':
        raise exceptions.WorkflowServiceError(
            'Exception getting deployment status: {}'.format(
                payload.get('message', '')))
    return payload['deployment_status']
|
2018-05-14 16:51:15 +00:00
|
|
|
|
|
|
|
|
2018-10-02 13:04:20 +00:00
|
|
|
def set_deployment_status(clients, status='success', **workflow_input):
    """Record the deployment status via the matching Mistral workflow.

    :param status: one of 'success', 'failed' or 'deploying'.
    :raises Exception: for an unrecognized ``status`` value.
    :raises exceptions.WorkflowServiceError: when the workflow fails.
    """
    workflow_client = clients.workflow_engine
    tripleoclients = clients.tripleoclient

    # Map each recognized status onto its dedicated workflow name.
    workflow = {
        'success': 'tripleo.deployment.v1.set_deployment_status_success',
        'failed': 'tripleo.deployment.v1.set_deployment_status_failed',
        'deploying': 'tripleo.deployment.v1.set_deployment_status_deploying',
    }.get(status)
    if workflow is None:
        raise Exception("Can't set unknown deployment status: %s" % status)

    with tripleoclients.messaging_websocket() as ws:
        execution = base.start_workflow(
            workflow_client,
            workflow,
            workflow_input=workflow_input)

        for payload in base.wait_for_messages(workflow_client, ws, execution,
                                              _WORKFLOW_TIMEOUT):
            # Drain messages; nothing to do until the workflow finishes.
            continue

    if payload['status'] != 'SUCCESS':
        raise exceptions.WorkflowServiceError(
            'Exception setting deployment status: {}'.format(
                payload.get('message', '')))
|
|
|
|
|
|
|
|
|
2018-05-14 16:51:15 +00:00
|
|
|
def get_deployment_failures(clients, **workflow_input):
    """Return the recorded deployment failures for a plan.

    Prints any informational message the action returns, then hands back
    the ``failures`` payload.
    """
    result = base.call_action(
        clients.workflow_engine,
        'tripleo.deployment.get_deployment_failures',
        **workflow_input
    )

    info = result.get('message')
    if info:
        print(info)

    return result['failures']
|