Extend UC ephemeral heat to export network
Run the commands to export the current stack's networks, virtual IPs and provisioned server as part of the OS::TripleO::UndercloudUpgradeEphemeralHeat service script. The network, virtual IP definitions and baremetal deployment definition will be stored in each stack's subdirectory in the working-dir. The user must provide the path to the roles data file for each overcloud stack managed by the undercloud being upgraded. For example, by adding a file with the below content and including this env file using the 'custom_env_files' option in undercloud.conf. parameter_defaults: OvercloudStackRoleDataFileMap: overcloud: /home/centos/overcloud/my_roles_data.yaml Related: blueprint network-data-v2-ports Change-Id: Id04a4f71f923a25fd50ca78e478a9b5abb61bbb9
This commit is contained in:
parent
fd3b527239
commit
28105815c1
|
@ -27,6 +27,13 @@ parameters:
|
||||||
description: Mapping of service endpoint -> protocol. Typically set
|
description: Mapping of service endpoint -> protocol. Typically set
|
||||||
via parameter_defaults in the resource registry.
|
via parameter_defaults in the resource registry.
|
||||||
type: json
|
type: json
|
||||||
|
OvercloudStackRoleDataFileMap:
|
||||||
|
default: {}
|
||||||
|
description: |
|
||||||
|
Mapping of overcloud stack name, and the absolute path to the
|
||||||
|
roles data file used when deploying/updating the stack.
|
||||||
|
For example: {'overcloud': '/home/stack/roles_data.yaml'}
|
||||||
|
type: json
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
role_data:
|
role_data:
|
||||||
|
@ -48,6 +55,14 @@ outputs:
|
||||||
mode: 0755
|
mode: 0755
|
||||||
when:
|
when:
|
||||||
- step|int == 1
|
- step|int == 1
|
||||||
|
- name: Store OvercloudStackRoleDataFileMap on file
|
||||||
|
vars:
|
||||||
|
role_data_file_map: {get_param: OvercloudStackRoleDataFileMap}
|
||||||
|
copy:
|
||||||
|
dest: /var/lib/tripleo-config/overcloud-stack-role-data-file-map.yaml
|
||||||
|
content: "{{ role_data_file_map | to_nice_yaml }}"
|
||||||
|
when:
|
||||||
|
- step|int == 1
|
||||||
- name: Run undercloud-upgrade-ephemeral-heat.py
|
- name: Run undercloud-upgrade-ephemeral-heat.py
|
||||||
shell: /var/lib/tripleo-config/scripts/undercloud-upgrade-ephemeral-heat.py
|
shell: /var/lib/tripleo-config/scripts/undercloud-upgrade-ephemeral-heat.py
|
||||||
when:
|
when:
|
||||||
|
|
|
@ -31,6 +31,8 @@ from tripleo_common.utils import plan as plan_utils
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger('undercloud')
|
LOG = logging.getLogger('undercloud')
|
||||||
|
ROLE_DATA_MAP_FILE = ('/var/lib/tripleo-config/'
|
||||||
|
'overcloud-stack-role-data-file-map.yaml')
|
||||||
|
|
||||||
|
|
||||||
def parse_args():
|
def parse_args():
|
||||||
|
@ -133,6 +135,55 @@ def _get_ctlplane_ip():
|
||||||
['sudo', 'hiera', 'ctlplane']))
|
['sudo', 'hiera', 'ctlplane']))
|
||||||
|
|
||||||
|
|
||||||
|
def _make_stack_dirs(stacks, working_dir):
|
||||||
|
"""Create stack directory if it does not already exist
|
||||||
|
|
||||||
|
:stacks: List of overcloud stack names
|
||||||
|
:type stacks: list
|
||||||
|
:working_dir: Path to working directory
|
||||||
|
:type working_dir: str
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
for stack in stacks:
|
||||||
|
stack_dir = os.path.join(working_dir, stack)
|
||||||
|
if not os.path.exists(stack_dir):
|
||||||
|
os.makedirs(stack_dir)
|
||||||
|
|
||||||
|
|
||||||
|
def _log_and_raise(msg):
|
||||||
|
"""Log error message and raise Exception
|
||||||
|
|
||||||
|
:msg: Message string that will be logged, and added in Exception
|
||||||
|
:type msg: str
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
LOG.error(msg)
|
||||||
|
raise Exception(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_role_data_file(stack):
|
||||||
|
"""Get the role data file for a stack
|
||||||
|
|
||||||
|
:param stack: Stack name to look up in the role data mapping
|
||||||
|
:type stack: str
|
||||||
|
:return: Path to the role data file
|
||||||
|
:rtype: str
|
||||||
|
"""
|
||||||
|
if not os.path.isfile(ROLE_DATA_MAP_FILE):
|
||||||
|
_log_and_raise("Overcloud stack role data mapping file: {} was not "
|
||||||
|
"found.".format(ROLE_DATA_MAP_FILE))
|
||||||
|
|
||||||
|
with open(ROLE_DATA_MAP_FILE, 'r') as f:
|
||||||
|
data = yaml.safe_load(f.read())
|
||||||
|
|
||||||
|
roles_data_file = data.get(stack)
|
||||||
|
if not roles_data_file or not os.path.isfile(roles_data_file):
|
||||||
|
_log_and_raise("Roles data file: {} for stack {} not found."
|
||||||
|
.format(roles_data_file, stack))
|
||||||
|
|
||||||
|
return roles_data_file
|
||||||
|
|
||||||
|
|
||||||
def drop_db():
|
def drop_db():
|
||||||
"""Drop the heat database and heat users
|
"""Drop the heat database and heat users
|
||||||
|
|
||||||
|
@ -184,6 +235,73 @@ def export_passwords(heat, stack, stack_dir):
|
||||||
os.chmod(passwords_path, 0o600)
|
os.chmod(passwords_path, 0o600)
|
||||||
|
|
||||||
|
|
||||||
|
def export_networks(stack, stack_dir):
|
||||||
|
"""Export networks from an existing stack and write network data file.
|
||||||
|
|
||||||
|
:param stack: Stack name to query for networks
|
||||||
|
:type stack: str
|
||||||
|
:param stack_dir: Directory to save the generated network data file
|
||||||
|
containing the stack network definitions.
|
||||||
|
:type stack_dir: str
|
||||||
|
:return: None
|
||||||
|
:rtype: None
|
||||||
|
"""
|
||||||
|
network_data_path = os.path.join(
|
||||||
|
stack_dir, "tripleo-{}-network-data.yaml".format(stack))
|
||||||
|
LOG.info("Exporting network from stack %s to %s"
|
||||||
|
% (stack, network_data_path))
|
||||||
|
subprocess.check_call(['openstack', 'overcloud', 'network', 'extract',
|
||||||
|
'--stack', stack, '--output', network_data_path,
|
||||||
|
'--yes'])
|
||||||
|
os.chmod(network_data_path, 0o600)
|
||||||
|
|
||||||
|
|
||||||
|
def export_network_virtual_ips(stack, stack_dir):
|
||||||
|
"""Export network virtual IPs from an existing stack and write network
|
||||||
|
vip data file.
|
||||||
|
|
||||||
|
:param stack: Stack name to query for network virtual IPs
|
||||||
|
:type stack: str
|
||||||
|
:param stack_dir: Directory to save the generated data file
|
||||||
|
containing the stack virtual IP definitions.
|
||||||
|
:type stack_dir: str
|
||||||
|
:return: None
|
||||||
|
:rtype: None
|
||||||
|
"""
|
||||||
|
vip_data_path = os.path.join(
|
||||||
|
stack_dir, "tripleo-{}-virtual-ips.yaml".format(stack))
|
||||||
|
LOG.info("Exporting network virtual IPs from stack %s to %s"
|
||||||
|
% (stack, vip_data_path))
|
||||||
|
subprocess.check_call(['openstack', 'overcloud', 'network', 'vip',
|
||||||
|
'extract', '--stack', stack, '--output',
|
||||||
|
vip_data_path, '--yes'])
|
||||||
|
os.chmod(vip_data_path, 0o600)
|
||||||
|
|
||||||
|
|
||||||
|
def export_provisioned_nodes(stack, stack_dir):
|
||||||
|
"""Export provisioned nodes from an existing stack and write baremetal
|
||||||
|
deployment definition file.
|
||||||
|
|
||||||
|
:param stack: Stack name to query for provisioned nodes
|
||||||
|
:type stack: str
|
||||||
|
:param stack_dir: Directory to save the generated data file
|
||||||
|
containing the stack baremetal deployment definitions.
|
||||||
|
:type stack_dir: str
|
||||||
|
:return: None
|
||||||
|
:rtype: None
|
||||||
|
"""
|
||||||
|
roles_data_file = _get_role_data_file(stack)
|
||||||
|
bm_deployment_path = os.path.join(
|
||||||
|
stack_dir, "tripleo-{}-baremetal-deployment.yaml".format(stack))
|
||||||
|
LOG.info("Exporting provisioned nodes from stack %s to %s"
|
||||||
|
% (stack, bm_deployment_path))
|
||||||
|
subprocess.check_call(['openstack', 'overcloud', 'node', 'extract',
|
||||||
|
'provisioned', '--stack', stack, '--roles-file',
|
||||||
|
roles_data_file, '--output', bm_deployment_path,
|
||||||
|
'--yes'])
|
||||||
|
os.chmod(bm_deployment_path, 0o600)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
logging.basicConfig()
|
logging.basicConfig()
|
||||||
LOG.setLevel(logging.INFO)
|
LOG.setLevel(logging.INFO)
|
||||||
|
@ -222,6 +340,15 @@ def main():
|
||||||
"existing stack data.")
|
"existing stack data.")
|
||||||
stacks = []
|
stacks = []
|
||||||
|
|
||||||
|
# Make stack directories in the working directory if they don't exist
|
||||||
|
_make_stack_dirs(stacks, working_dir)
|
||||||
|
|
||||||
|
for stack in stacks:
|
||||||
|
stack_dir = os.path.join(working_dir, stack)
|
||||||
|
export_networks(stack, stack_dir)
|
||||||
|
export_network_virtual_ips(stack, stack_dir)
|
||||||
|
export_provisioned_nodes(stack, stack_dir)
|
||||||
|
|
||||||
if database_exists():
|
if database_exists():
|
||||||
backup_dir = os.path.join(
|
backup_dir = os.path.join(
|
||||||
working_dir,
|
working_dir,
|
||||||
|
@ -233,8 +360,6 @@ def main():
|
||||||
|
|
||||||
for stack in stacks:
|
for stack in stacks:
|
||||||
stack_dir = os.path.join(working_dir, stack)
|
stack_dir = os.path.join(working_dir, stack)
|
||||||
if not os.path.exists(stack_dir):
|
|
||||||
os.makedirs(stack_dir)
|
|
||||||
if db_tar_path:
|
if db_tar_path:
|
||||||
# Symlink to the existing db backup
|
# Symlink to the existing db backup
|
||||||
os.symlink(db_tar_path,
|
os.symlink(db_tar_path,
|
||||||
|
|
Loading…
Reference in New Issue