Browse Source

Save needed stack outputs in working_dir

With ephemeral-heat, we need to save the needed stack outputs in the
working_dir so that the values can be used when needed if the stack is
no longer around.

With the outputs saved, the stack object no longer needs to be passed to
workflows.deployment.config_download and
workflows.deployment.get_hosts_and_enable_ssh_admin, only the stack name
and output values are needed.

Signed-off-by: James Slagle <jslagle@redhat.com>
Change-Id: I3cc61bfee94227045909a4b0ccf84a8d595b2cea
changes/09/803609/9
James Slagle 10 months ago committed by Alex Schultz
parent
commit
276a6def10
  1. 5
      releasenotes/notes/save-stack-outputs-61c2ad9528ae2529.yaml
  2. 3
      tripleoclient/constants.py
  3. 5
      tripleoclient/tests/v1/overcloud_deploy/test_overcloud_deploy.py
  4. 14
      tripleoclient/tests/workflows/test_deployment.py
  5. 29
      tripleoclient/utils.py
  6. 22
      tripleoclient/v1/overcloud_deploy.py
  7. 5
      tripleoclient/v1/overcloud_external_update.py
  8. 5
      tripleoclient/v1/overcloud_external_upgrade.py
  9. 5
      tripleoclient/v1/overcloud_upgrade.py
  10. 53
      tripleoclient/workflows/deployment.py

5
releasenotes/notes/save-stack-outputs-61c2ad9528ae2529.yaml

@ -0,0 +1,5 @@
---
other:
- Stack outputs that are needed by other functionality of the overcloud
deployment are now saved in the stack working directory in an outputs
subdirectory (default ~/overcloud-deploy/<stack>/outputs).

3
tripleoclient/constants.py

@ -85,6 +85,9 @@ PUPPET_MODULES = "/etc/puppet/modules/"
PUPPET_BASE = "/etc/puppet/"
STACK_TIMEOUT = 240
STACK_OUTPUTS = ['BlacklistedHostnames',
'RoleNetIpMap',
'BlacklistedIpAddresses']
IRONIC_HTTP_BOOT_BIND_MOUNT = '/var/lib/ironic/httpboot'
IRONIC_LOCAL_IMAGE_PATH = '/var/lib/ironic/images'

5
tripleoclient/tests/v1/overcloud_deploy/test_overcloud_deploy.py

@ -325,7 +325,8 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
clients = self.app.client_manager
orchestration_client = clients.orchestration
mock_stack = fakes.create_tht_stack()
orchestration_client.stacks.get.side_effect = [None, mock_stack]
orchestration_client.stacks.get.side_effect = \
[None, mock_stack, mock_stack]
def _orch_clt_create(**kwargs):
orchestration_client.stacks.get.return_value = mock_stack
@ -1421,7 +1422,7 @@ class TestDeployOvercloud(fakes.TestDeployOvercloud):
deployment_timeout=448, # 451 - 3, total time left
in_flight_validations=False, limit_hosts=None,
skip_tags=None, tags=None, timeout=42,
verbosity=3, forks=None)],
verbosity=3, forks=None, denyed_hostnames=None)],
fixture.mock_config_download.mock_calls)
fixture.mock_config_download.assert_called()
mock_copy.assert_called_once()

14
tripleoclient/tests/workflows/test_deployment.py

@ -66,6 +66,7 @@ class TestDeploymentWorkflows(utils.TestCommand):
def test_get_overcloud_hosts(self, mock_role_net_ip_map,
mock_blacklisted_ip_addresses):
stack = mock.Mock()
working_dir = mock.Mock()
mock_role_net_ip_map.return_value = {
'Controller': {
'ctlplane': ['1.1.1.1', '2.2.2.2', '3.3.3.3'],
@ -76,12 +77,12 @@ class TestDeploymentWorkflows(utils.TestCommand):
}
mock_blacklisted_ip_addresses.return_value = []
ips = deployment.get_overcloud_hosts(stack, 'ctlplane')
ips = deployment.get_overcloud_hosts(stack, 'ctlplane', working_dir)
expected = ['1.1.1.1', '2.2.2.2', '3.3.3.3',
'7.7.7.7', '8.8.8.8', '9.9.9.9']
self.assertEqual(sorted(expected), sorted(ips))
ips = deployment.get_overcloud_hosts(stack, 'external')
ips = deployment.get_overcloud_hosts(stack, 'external', working_dir)
expected = ['4.4.4.4', '5.5.5.5', '6.6.6.6',
'10.10.10.10', '11.11.11.11', '12.12.12.12']
self.assertEqual(sorted(expected), sorted(ips))
@ -92,6 +93,7 @@ class TestDeploymentWorkflows(utils.TestCommand):
self, mock_role_net_ip_map,
mock_blacklisted_ip_addresses):
stack = mock.Mock()
working_dir = mock.Mock()
stack.output_show.return_value = []
mock_role_net_ip_map.return_value = {
'Controller': {
@ -103,19 +105,19 @@ class TestDeploymentWorkflows(utils.TestCommand):
}
mock_blacklisted_ip_addresses.return_value = ['8.8.8.8']
ips = deployment.get_overcloud_hosts(stack, 'ctlplane')
ips = deployment.get_overcloud_hosts(stack, 'ctlplane', working_dir)
expected = ['1.1.1.1', '2.2.2.2', '3.3.3.3',
'7.7.7.7', '9.9.9.9']
self.assertEqual(sorted(expected), sorted(ips))
ips = deployment.get_overcloud_hosts(stack, 'external')
ips = deployment.get_overcloud_hosts(stack, 'external', working_dir)
expected = ['4.4.4.4', '5.5.5.5', '6.6.6.6',
'10.10.10.10', '12.12.12.12']
self.assertEqual(sorted(expected), sorted(ips))
mock_blacklisted_ip_addresses.return_value = ['7.7.7.7', '9.9.9.9',
'2.2.2.2']
ips = deployment.get_overcloud_hosts(stack, 'external')
ips = deployment.get_overcloud_hosts(stack, 'external', working_dir)
expected = ['4.4.4.4', '6.6.6.6', '11.11.11.11']
self.assertEqual(sorted(expected), sorted(ips))
@ -129,7 +131,7 @@ class TestDeploymentWorkflows(utils.TestCommand):
stack.output_show.return_value = {'output': {'output_value': []}}
clients = mock.Mock()
deployment.config_download(
log, clients, stack, 'templates', 'ssh_user',
log, clients, 'stacktest', 'templates', 'ssh_user',
'ssh_key', 'ssh_networks', 'output_dir', False,
'timeout')

29
tripleoclient/utils.py

@ -987,6 +987,15 @@ def get_stack_output_item(stack, item):
return None
def get_stack_saved_output_item(output, working_dir):
    """Load a previously saved stack output value from the working dir.

    Saved outputs live as YAML files, named after the output, inside an
    'outputs' subdirectory of the stack working directory.

    :param output: Name of the stack output to load.
    :param working_dir: Path to the stack working directory.
    :return: The deserialized output value, or None when no saved file
        exists for the requested output.
    """
    saved_path = os.path.join(working_dir, 'outputs', output)
    if not os.path.isfile(saved_path):
        return None
    with open(saved_path) as f:
        return yaml.safe_load(f)
def get_overcloud_endpoint(stack):
return get_stack_output_item(stack, 'KeystoneURL')
@ -1005,12 +1014,14 @@ def get_endpoint_map(stack):
return endpoint_map
def get_blacklisted_ip_addresses(stack):
return get_stack_output_item(stack, 'BlacklistedIpAddresses')
def get_blacklisted_ip_addresses(working_dir):
    """Return the saved 'BlacklistedIpAddresses' stack output.

    :param working_dir: Path to the stack working directory.
    :return: The saved output value, or None when it was never saved.
    """
    return get_stack_saved_output_item('BlacklistedIpAddresses', working_dir)
def get_role_net_ip_map(stack):
return get_stack_output_item(stack, 'RoleNetIpMap')
def get_role_net_ip_map(working_dir):
    """Return the saved 'RoleNetIpMap' stack output.

    :param working_dir: Path to the stack working directory.
    :return: The saved output value, or None when it was never saved.
    """
    return get_stack_saved_output_item('RoleNetIpMap', working_dir)
def get_endpoint(key, stack):
@ -3182,3 +3193,13 @@ def parse_ansible_inventory(inventory_file, group):
sources=[inventory_file])
return(inventory.get_hosts(pattern=group))
def save_stack_outputs(heat, stack, working_dir):
    """Persist selected stack outputs as YAML files in the working dir.

    Every output listed in constants.STACK_OUTPUTS is written to
    <working_dir>/outputs/<output-name> so that its value stays
    available even after an ephemeral heat stack is gone.

    :param heat: Orchestration client (unused here; presumably kept for
        interface symmetry with sibling helpers — confirm with callers).
    :param stack: Stack object to read the output values from.
    :param working_dir: Path to the stack working directory.
    """
    outputs_dir = os.path.join(working_dir, 'outputs')
    makedirs(outputs_dir)
    for output_name in constants.STACK_OUTPUTS:
        value = get_stack_output_item(stack, output_name)
        with open(os.path.join(outputs_dir, output_name), 'w') as f:
            f.write(yaml.dump(value))

22
tripleoclient/v1/overcloud_deploy.py

@ -1146,6 +1146,11 @@ class DeployOvercloud(command.Command):
self.deploy_tripleo_heat_templates(
stack, parsed_args, new_tht_root,
user_tht_root, created_env_files)
stack = utils.get_stack(
self.orchestration_client, parsed_args.stack)
utils.save_stack_outputs(
self.orchestration_client, stack, self.working_dir)
except Exception:
if parsed_args.heat_type != 'installed' and self.heat_launcher:
self.log.info("Stopping ephemeral heat.")
@ -1153,9 +1158,10 @@ class DeployOvercloud(command.Command):
utils.rm_heat(self.heat_launcher, backup_db=True)
raise
# Get a new copy of the stack after stack update/create. If it was
# a create then the previous stack object would be None.
stack = utils.get_stack(self.orchestration_client, parsed_args.stack)
# Get a new copy of the stack after stack update/create. If it
# was a create then the previous stack object would be None.
stack = utils.get_stack(
self.orchestration_client, parsed_args.stack)
overcloud_endpoint = None
old_rcpath = None
@ -1195,7 +1201,7 @@ class DeployOvercloud(command.Command):
if not parsed_args.config_download_only:
deployment.get_hosts_and_enable_ssh_admin(
stack,
parsed_args.stack,
parsed_args.overcloud_ssh_network,
parsed_args.overcloud_ssh_user,
self.get_key_pair(parsed_args),
@ -1226,7 +1232,7 @@ class DeployOvercloud(command.Command):
deployment.config_download(
self.log,
self.clients,
stack,
parsed_args.stack,
parsed_args.overcloud_ssh_network,
config_download_dir,
parsed_args.override_ansible_cfg,
@ -1240,7 +1246,9 @@ class DeployOvercloud(command.Command):
limit_hosts=utils.playbook_limit_parse(
limit_nodes=parsed_args.limit
),
forks=parsed_args.ansible_forks)
forks=parsed_args.ansible_forks,
denyed_hostnames=utils.get_stack_saved_output_item(
'BlacklistedHostnames', self.working_dir))
deployment.set_deployment_status(
stack.stack_name,
status=deploy_status,
@ -1272,7 +1280,7 @@ class DeployOvercloud(command.Command):
deploy_status = 'DEPLOY_FAILED'
deploy_message = 'with error'
deployment.set_deployment_status(
stack.stack_name,
parsed_args.stack,
status=deploy_status,
working_dir=self.working_dir)
finally:

5
tripleoclient/v1/overcloud_external_update.py

@ -138,10 +138,7 @@ class ExternalUpdateRun(command.Command):
deployment.config_download(
log=self.log,
clients=self.app.client_manager,
stack=oooutils.get_stack(
self.app.client_manager.orchestration,
parsed_args.stack
),
stack_name=parsed_args.stack,
output_dir=ansible_dir,
verbosity=oooutils.playbook_verbosity(self=self),
ansible_playbook_name=constants.EXTERNAL_UPDATE_PLAYBOOKS,

5
tripleoclient/v1/overcloud_external_upgrade.py

@ -130,10 +130,7 @@ class ExternalUpgradeRun(command.Command):
deployment.config_download(
log=self.log,
clients=self.app.client_manager,
stack=oooutils.get_stack(
self.app.client_manager.orchestration,
parsed_args.stack
),
stack_name=parsed_args.stack,
output_dir=ansible_dir,
verbosity=oooutils.playbook_verbosity(self=self),
ansible_playbook_name=constants.EXTERNAL_UPGRADE_PLAYBOOKS,

5
tripleoclient/v1/overcloud_upgrade.py

@ -229,10 +229,7 @@ class UpgradeRun(command.Command):
deployment.config_download(
log=self.log,
clients=self.app.client_manager,
stack=oooutils.get_stack(
self.app.client_manager.orchestration,
parsed_args.stack
),
stack_name=parsed_args.stack,
output_dir=ansible_dir,
verbosity=oooutils.playbook_verbosity(self=self),
ansible_playbook_name=playbook,

53
tripleoclient/workflows/deployment.py

@ -103,10 +103,10 @@ def deploy_without_plan(clients, stack, stack_name, template,
raise exceptions.DeploymentError("Heat Stack update failed.")
def get_overcloud_hosts(stack, ssh_network):
def get_overcloud_hosts(stack, ssh_network, working_dir):
ips = []
role_net_ip_map = utils.get_role_net_ip_map(stack)
blacklisted_ips = utils.get_blacklisted_ip_addresses(stack)
role_net_ip_map = utils.get_role_net_ip_map(working_dir)
blacklisted_ips = utils.get_blacklisted_ip_addresses(working_dir)
if not role_net_ip_map:
raise exceptions.DeploymentError(
'No overcloud hosts were found in the current stack.'
@ -136,7 +136,7 @@ def get_overcloud_hosts(stack, ssh_network):
return ips
def get_hosts_and_enable_ssh_admin(stack, overcloud_ssh_network,
def get_hosts_and_enable_ssh_admin(stack_name, overcloud_ssh_network,
overcloud_ssh_user, overcloud_ssh_key,
overcloud_ssh_port_timeout,
working_dir, verbosity=0,
@ -146,8 +146,8 @@ def get_hosts_and_enable_ssh_admin(stack, overcloud_ssh_network,
Get a list of hosts from a given stack and enable admin ssh across all of
them.
:param stack: Stack data.
:type stack: Object
:param stack_name: Stack name.
:type stack_name: String
:param overcloud_ssh_network: Network id.
:type overcloud_ssh_network: String
@ -165,10 +165,10 @@ def get_hosts_and_enable_ssh_admin(stack, overcloud_ssh_network,
:type verbosity: Integer
"""
hosts = get_overcloud_hosts(stack, overcloud_ssh_network)
hosts = get_overcloud_hosts(stack_name, overcloud_ssh_network, working_dir)
if [host for host in hosts if host]:
enable_ssh_admin(
stack,
stack_name,
hosts,
overcloud_ssh_user,
overcloud_ssh_key,
@ -180,18 +180,18 @@ def get_hosts_and_enable_ssh_admin(stack, overcloud_ssh_network,
else:
raise exceptions.DeploymentError(
'Cannot find any hosts on "{}" in network "{}"'.format(
stack.stack_name,
stack_name,
overcloud_ssh_network
)
)
def enable_ssh_admin(stack, hosts, ssh_user, ssh_key, timeout,
def enable_ssh_admin(stack_name, hosts, ssh_user, ssh_key, timeout,
working_dir, verbosity=0, heat_type='installed'):
"""Run enable ssh admin access playbook.
:param stack: Stack data.
:type stack: Object
:param stack_name: Stack name.
:type stack_name: String
:param hosts: Machines to connect to.
:type hosts: List
@ -237,7 +237,7 @@ def enable_ssh_admin(stack, hosts, ssh_user, ssh_key, timeout,
extra_vars={
"ssh_user": ssh_user,
"ssh_servers": hosts,
'tripleo_cloud_name': stack.stack_name
'tripleo_cloud_name': stack_name
},
ansible_timeout=timeout
)
@ -247,14 +247,15 @@ def enable_ssh_admin(stack, hosts, ssh_user, ssh_key, timeout,
print("Enabling ssh admin - COMPLETE.")
def config_download(log, clients, stack, ssh_network='ctlplane',
def config_download(log, clients, stack_name, ssh_network='ctlplane',
output_dir=None, override_ansible_cfg=None,
timeout=600, verbosity=0, deployment_options=None,
in_flight_validations=False,
ansible_playbook_name='deploy_steps_playbook.yaml',
limit_hosts=None, extra_vars=None, inventory_path=None,
ssh_user='tripleo-admin', tags=None, skip_tags=None,
deployment_timeout=None, forks=None, working_dir=None):
deployment_timeout=None, forks=None, working_dir=None,
denyed_hostnames=None):
"""Run config download.
:param log: Logging object
@ -344,7 +345,7 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
output_dir = DEFAULT_WORK_DIR
if not working_dir:
working_dir = utils.get_default_working_dir(stack.stack_name)
working_dir = utils.get_default_working_dir(stack_name)
if not deployment_options:
deployment_options = dict()
@ -373,22 +374,18 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
_log_and_print(
message='Checking for blacklisted hosts from stack: {}'.format(
stack.stack_name
stack_name
),
logger=log,
print_msg=(verbosity == 0)
)
if not limit_hosts:
blacklist_show = stack.output_show('BlacklistedHostnames')
blacklist_stack_output = blacklist_show.get('output', dict())
blacklist_stack_output_value = blacklist_stack_output.get(
'output_value')
if blacklist_stack_output_value:
if denyed_hostnames:
limit_hosts = (
':'.join(['!{}'.format(i) for i in blacklist_stack_output_value
':'.join(['!{}'.format(i) for i in denyed_hostnames
if i]))
key_file = utils.get_key(stack.stack_name)
key_file = utils.get_key(stack_name)
python_interpreter = deployment_options.get('ansible_python_interpreter')
playbook = 'cli-config-download.yaml'
@ -402,7 +399,7 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
verbosity=verbosity,
reproduce_command=True,
extra_vars={
'plan': stack.stack_name,
'plan': stack_name,
'output_dir': output_dir,
'ansible_ssh_user': ssh_user,
'ansible_ssh_private_key_file': key_file,
@ -414,13 +411,13 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
_log_and_print(
message='Executing deployment playbook for stack: {}'.format(
stack.stack_name
stack_name
),
logger=log,
print_msg=(verbosity == 0)
)
stack_work_dir = os.path.join(output_dir, stack.stack_name)
stack_work_dir = os.path.join(output_dir, stack_name)
if not inventory_path:
inventory_path = os.path.join(stack_work_dir,
'inventory')
@ -455,7 +452,7 @@ def config_download(log, clients, stack, ssh_network='ctlplane',
_log_and_print(
message='Overcloud configuration completed for stack: {}'.format(
stack.stack_name
stack_name
),
logger=log,
print_msg=(verbosity == 0)

Loading…
Cancel
Save