Merge "Update "dcmanager subcloud add" to use phased operations"
commit 2209aeb872
@@ -10,7 +10,6 @@ import os
from oslo_log import log as logging
from oslo_messaging import RemoteError
import pecan
import tsconfig.tsconfig as tsc
import yaml

from dcmanager.api.controllers import restcomm
@@ -170,41 +169,19 @@ class PhasedSubcloudDeployController(object):

payload = get_create_payload(request)

if not payload:
pecan.abort(400, _('Body required'))

psd_common.validate_bootstrap_values(payload)

# If a subcloud release is not passed, use the current
# system controller software_version
payload['software_version'] = payload.get('release', tsc.SW_VERSION)

psd_common.validate_subcloud_name_availability(context, payload['name'])

psd_common.validate_system_controller_patch_status("create")

psd_common.validate_subcloud_config(context, payload)

psd_common.validate_install_values(payload)

psd_common.validate_k8s_version(payload)

psd_common.format_ip_address(payload)

# Upload the deploy config files if it is included in the request
# It has a dependency on the subcloud name, and it is called after
# the name has been validated
psd_common.upload_deploy_config_file(request, payload)
psd_common.pre_deploy_create(payload, context, request)

try:
# Add the subcloud details to the database
subcloud = psd_common.add_subcloud_to_database(context, payload)

# Ask dcmanager-manager to add the subcloud.
# Ask dcmanager-manager to create the subcloud.
# It will do all the real work...
subcloud = self.dcmanager_rpc_client.subcloud_deploy_create(
context, subcloud.id, payload)
return subcloud

subcloud_dict = db_api.subcloud_db_model_to_dict(subcloud)
return subcloud_dict

except RemoteError as e:
pecan.abort(httpclient.UNPROCESSABLE_ENTITY, e.value)
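With this change the phased-deploy create handler reduces to three steps: shared pre-create validation, inserting the subcloud row, and an RPC cast to dcmanager-manager. A condensed sketch of the resulting flow, using only names that appear in this change (the surrounding controller code is assumed):

    payload = get_create_payload(request)
    psd_common.pre_deploy_create(payload, context, request)

    subcloud = psd_common.add_subcloud_to_database(context, payload)
    # dcmanager-manager does the real work asynchronously
    subcloud = self.dcmanager_rpc_client.subcloud_deploy_create(
        context, subcloud.id, payload)
    return subcloud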
File diff suppressed because it is too large
@@ -151,6 +151,18 @@ def validate_system_controller_patch_status(operation: str):
% operation)


def validate_migrate_parameter(payload, request):
migrate_str = payload.get('migrate')
if migrate_str is not None:
if migrate_str not in ["true", "false"]:
pecan.abort(400, _('The migrate option is invalid, '
'valid options are true and false.'))

if consts.DEPLOY_CONFIG in request.POST:
pecan.abort(400, _('migrate with deploy-config is '
'not allowed'))


def validate_subcloud_config(context, payload, operation=None,
ignore_conflicts_with=None):
"""Check whether subcloud config is valid."""
@@ -452,7 +464,7 @@ def validate_install_values(payload, subcloud=None):
"""
install_values = payload.get('install_values')
if not install_values:
return False
return

original_install_values = None
if subcloud:
@@ -490,6 +502,7 @@ def validate_install_values(payload, subcloud=None):
LOG.debug("software_version (%s) is added to install_values" %
software_version)
payload['install_values'].update({'software_version': software_version})

if 'persistent_size' in install_values:
persistent_size = install_values.get('persistent_size')
if not isinstance(persistent_size, int):
@@ -501,6 +514,7 @@
pecan.abort(400, _("persistent_size of %s MB is less than "
"the permitted minimum %s MB ") %
(str(persistent_size), consts.DEFAULT_PERSISTENT_SIZE))

if 'hw_settle' in install_values:
hw_settle = install_values.get('hw_settle')
if not isinstance(hw_settle, int):
@@ -510,6 +524,24 @@
pecan.abort(400, _("hw_settle of %s seconds is less than 0") %
(str(hw_settle)))

if 'extra_boot_params' in install_values:
# Validate 'extra_boot_params' boot parameter
# Note: this must be a single string (no spaces). If
# multiple boot parameters are required they can be
# separated by commas. They will be split into separate
# arguments by the miniboot.cfg kickstart.
extra_boot_params = install_values.get('extra_boot_params')
if extra_boot_params in ('', None, 'None'):
msg = "The install value extra_boot_params must not be empty."
pecan.abort(400, _(msg))
if ' ' in extra_boot_params:
msg = (
"Invalid install value 'extra_boot_params="
f"{extra_boot_params}'. Spaces are not allowed "
"(use ',' to separate multiple arguments)"
)
pecan.abort(400, _(msg))

for k in dccommon_consts.MANDATORY_INSTALL_VALUES:
if k not in install_values:
if original_install_values:
@@ -592,8 +624,6 @@
LOG.exception(e)
pecan.abort(400, _("rd.net.timeout.ipv6dad invalid: %s") % e)

return True


def validate_k8s_version(payload):
"""Validate k8s version.
@@ -677,18 +707,20 @@ def format_ip_address(payload):


def upload_deploy_config_file(request, payload):
if consts.DEPLOY_CONFIG in request.POST:
file_item = request.POST[consts.DEPLOY_CONFIG]
file_item = request.POST.get(consts.DEPLOY_CONFIG)
if file_item is None:
return

filename = getattr(file_item, 'filename', '')
if not filename:
pecan.abort(400, _("No %s file uploaded"
% consts.DEPLOY_CONFIG))
pecan.abort(400, _("No %s file uploaded" % consts.DEPLOY_CONFIG))

file_item.file.seek(0, os.SEEK_SET)
contents = file_item.file.read()
# the deploy config needs to upload to the override location
fn = get_config_file_path(payload['name'], consts.DEPLOY_CONFIG)
upload_config_file(contents, fn, consts.DEPLOY_CONFIG)
payload.update({consts.DEPLOY_CONFIG: fn})
payload[consts.DEPLOY_CONFIG] = fn
get_common_deploy_files(payload, payload['software_version'])
@@ -718,8 +750,7 @@ def upload_config_file(file_item, config_file, config_type):
def get_common_deploy_files(payload, software_version):
missing_deploy_files = []
for f in consts.DEPLOY_COMMON_FILE_OPTIONS:
# Skip the prestage_images option as it is
# not relevant in this context
# Skip the prestage_images option as it is not relevant in this context
if f == consts.DEPLOY_PRESTAGE:
continue
filename = None
@@ -858,6 +889,35 @@ def populate_payload_with_pre_existing_data(payload: dict,
get_common_deploy_files(payload, subcloud.software_version)


def pre_deploy_create(payload: dict, context: RequestContext,
request: pecan.Request):
if not payload:
pecan.abort(400, _('Body required'))

validate_bootstrap_values(payload)

# If a subcloud release is not passed, use the current
# system controller software_version
payload['software_version'] = payload.get('release', tsc.SW_VERSION)

validate_subcloud_name_availability(context, payload['name'])

validate_system_controller_patch_status("create")

validate_subcloud_config(context, payload)

validate_install_values(payload)

validate_k8s_version(payload)

format_ip_address(payload)

# Upload the deploy config files if it is included in the request
# It has a dependency on the subcloud name, and it is called after
# the name has been validated
upload_deploy_config_file(request, payload)


def pre_deploy_install(payload: dict, validate_password=False):
if validate_password:
validate_sysadmin_password(payload)
@@ -99,10 +99,10 @@ class DCManagerService(service.Service):
super(DCManagerService, self).start()

@request_context
def add_subcloud(self, context, payload):
def add_subcloud(self, context, subcloud_id, payload):
# Adds a subcloud
LOG.info("Handling add_subcloud request for: %s" % payload.get('name'))
return self.subcloud_manager.add_subcloud(context, payload)
return self.subcloud_manager.add_subcloud(context, subcloud_id, payload)

@request_context
def delete_subcloud(self, context, subcloud_id):
@@ -105,8 +105,10 @@ CERT_NAMESPACE = "dc-cert"
TRANSITORY_STATES = {
consts.DEPLOY_STATE_NONE: consts.DEPLOY_STATE_DEPLOY_PREP_FAILED,
consts.DEPLOY_STATE_PRE_DEPLOY: consts.DEPLOY_STATE_DEPLOY_PREP_FAILED,
consts.DEPLOY_STATE_CREATING: consts.DEPLOY_STATE_CREATE_FAILED,
consts.DEPLOY_STATE_PRE_INSTALL: consts.DEPLOY_STATE_PRE_INSTALL_FAILED,
consts.DEPLOY_STATE_INSTALLING: consts.DEPLOY_STATE_INSTALL_FAILED,
consts.DEPLOY_STATE_PRE_BOOTSTRAP: consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED,
consts.DEPLOY_STATE_BOOTSTRAPPING: consts.DEPLOY_STATE_BOOTSTRAP_FAILED,
consts.DEPLOY_STATE_PRE_CONFIG: consts.DEPLOY_STATE_PRE_CONFIG_FAILED,
consts.DEPLOY_STATE_CONFIGURING: consts.DEPLOY_STATE_CONFIG_FAILED,
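TRANSITORY_STATES maps each in-progress deploy state to the failure state it should fall back to. A minimal sketch of how such a map is typically applied when the manager restarts; this handling is not shown in this diff and the db_api helper names are assumed:

    for subcloud in db_api.subcloud_get_all(context):
        failed_state = TRANSITORY_STATES.get(subcloud.deploy_status)
        if failed_state:
            # Abandon any deployment that was interrupted by the restart
            db_api.subcloud_update(context, subcloud.id,
                                   deploy_status=failed_state)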
@@ -245,11 +247,10 @@ class SubcloudManager(manager.Manager):
software_version if software_version else SW_VERSION]
return install_command

# TODO(gherzman): rename compose_apply_command to compose_bootstrap_command
def compose_apply_command(self, subcloud_name,
def compose_bootstrap_command(self, subcloud_name,
ansible_subcloud_inventory_file,
software_version=None):
apply_command = [
bootstrap_command = [
"ansible-playbook",
utils.get_playbook_for_software_version(
ANSIBLE_SUBCLOUD_PLAYBOOK, software_version),
@@ -258,23 +259,22 @@
]
# Add the overrides dir and region_name so the playbook knows
# which overrides to load
apply_command += [
bootstrap_command += [
"-e", str("override_files_dir='%s' region_name=%s") % (
dccommon_consts.ANSIBLE_OVERRIDES_PATH, subcloud_name),
"-e", "install_release_version=%s" %
software_version if software_version else SW_VERSION]
return apply_command
return bootstrap_command

# TODO(vgluzrom): rename compose_deploy_command to compose_config_command
def compose_deploy_command(self, subcloud_name, ansible_subcloud_inventory_file, payload):
deploy_command = [
def compose_config_command(self, subcloud_name, ansible_subcloud_inventory_file, payload):
config_command = [
"ansible-playbook", payload[consts.DEPLOY_PLAYBOOK],
"-e", "@%s" % dccommon_consts.ANSIBLE_OVERRIDES_PATH + "/" +
subcloud_name + '_deploy_values.yml',
"-i", ansible_subcloud_inventory_file,
"--limit", subcloud_name
]
return deploy_command
return config_command

def compose_backup_command(self, subcloud_name, ansible_subcloud_inventory_file):
backup_command = [
@ -331,205 +331,71 @@ class SubcloudManager(manager.Manager):
|
||||
dccommon_consts.ANSIBLE_OVERRIDES_PATH, subcloud_name)]
|
||||
return rehome_command
|
||||
|
||||
def add_subcloud(self, context, payload):
|
||||
"""Add subcloud and notify orchestrators.
|
||||
|
||||
:param context: request context object
|
||||
:param payload: subcloud configuration
|
||||
"""
|
||||
LOG.info("Adding subcloud %s." % payload['name'])
|
||||
subcloud_id = db_api.subcloud_get_by_name(context, payload['name']).id
|
||||
|
||||
# Check the migrate option from payload
|
||||
migrate_str = payload.get('migrate', '')
|
||||
migrate_flag = (migrate_str.lower() == 'true')
|
||||
if migrate_flag:
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud_id,
|
||||
deploy_status=consts.DEPLOY_STATE_PRE_REHOME)
|
||||
else:
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud_id,
|
||||
deploy_status=consts.DEPLOY_STATE_PRE_DEPLOY)
|
||||
|
||||
try:
|
||||
def rehome_subcloud(self, context, subcloud, payload):
|
||||
# Ansible inventory filename for the specified subcloud
|
||||
ansible_subcloud_inventory_file = self._get_ansible_filename(
|
||||
subcloud.name, INVENTORY_FILE_POSTFIX)
|
||||
|
||||
# Create a new route to this subcloud on the management interface
|
||||
# on both controllers.
|
||||
m_ks_client = OpenStackDriver(
|
||||
region_name=dccommon_consts.DEFAULT_REGION_NAME,
|
||||
region_clients=None).keystone_client
|
||||
subcloud_subnet = netaddr.IPNetwork(utils.get_management_subnet(payload))
|
||||
endpoint = m_ks_client.endpoint_cache.get_endpoint('sysinv')
|
||||
sysinv_client = SysinvClient(dccommon_consts.DEFAULT_REGION_NAME,
|
||||
m_ks_client.session,
|
||||
endpoint=endpoint)
|
||||
LOG.debug("Getting cached regionone data for %s" % subcloud.name)
|
||||
cached_regionone_data = self._get_cached_regionone_data(m_ks_client, sysinv_client)
|
||||
for mgmt_if_uuid in cached_regionone_data['mgmt_interface_uuids']:
|
||||
sysinv_client.create_route(mgmt_if_uuid,
|
||||
str(subcloud_subnet.ip),
|
||||
subcloud_subnet.prefixlen,
|
||||
payload['systemcontroller_gateway_address'],
|
||||
1)
|
||||
|
||||
# Create endpoints to this subcloud on the
|
||||
# management-start-ip of the subcloud which will be allocated
|
||||
# as the floating Management IP of the Subcloud if the
|
||||
# Address Pool is not shared. Incase the endpoint entries
|
||||
# are incorrect, or the management IP of the subcloud is changed
|
||||
# in the future, it will not go managed or will show up as
|
||||
# out of sync. To fix this use Openstack endpoint commands
|
||||
# on the SystemController to change the subcloud endpoints.
|
||||
# The non-identity endpoints are added to facilitate horizon access
|
||||
# from the System Controller to the subcloud.
|
||||
endpoint_config = []
|
||||
endpoint_ip = utils.get_management_start_address(payload)
|
||||
if netaddr.IPAddress(endpoint_ip).version == 6:
|
||||
endpoint_ip = '[' + endpoint_ip + ']'
|
||||
|
||||
for service in m_ks_client.services_list:
|
||||
if service.type == dccommon_consts.ENDPOINT_TYPE_PLATFORM:
|
||||
admin_endpoint_url = "https://{}:6386/v1".format(endpoint_ip)
|
||||
endpoint_config.append({"id": service.id,
|
||||
"admin_endpoint_url": admin_endpoint_url})
|
||||
elif service.type == dccommon_consts.ENDPOINT_TYPE_IDENTITY:
|
||||
admin_endpoint_url = "https://{}:5001/v3".format(endpoint_ip)
|
||||
endpoint_config.append({"id": service.id,
|
||||
"admin_endpoint_url": admin_endpoint_url})
|
||||
elif service.type == dccommon_consts.ENDPOINT_TYPE_PATCHING:
|
||||
admin_endpoint_url = "https://{}:5492".format(endpoint_ip)
|
||||
endpoint_config.append({"id": service.id,
|
||||
"admin_endpoint_url": admin_endpoint_url})
|
||||
elif service.type == dccommon_consts.ENDPOINT_TYPE_FM:
|
||||
admin_endpoint_url = "https://{}:18003".format(endpoint_ip)
|
||||
endpoint_config.append({"id": service.id,
|
||||
"admin_endpoint_url": admin_endpoint_url})
|
||||
elif service.type == dccommon_consts.ENDPOINT_TYPE_NFV:
|
||||
admin_endpoint_url = "https://{}:4546".format(endpoint_ip)
|
||||
endpoint_config.append({"id": service.id,
|
||||
"admin_endpoint_url": admin_endpoint_url})
|
||||
|
||||
if len(endpoint_config) < 5:
|
||||
raise exceptions.BadRequest(
|
||||
resource='subcloud',
|
||||
msg='Missing service in SystemController')
|
||||
|
||||
for endpoint in endpoint_config:
|
||||
try:
|
||||
m_ks_client.keystone_client.endpoints.create(
|
||||
endpoint["id"],
|
||||
endpoint['admin_endpoint_url'],
|
||||
interface=dccommon_consts.KS_ENDPOINT_ADMIN,
|
||||
region=subcloud.name)
|
||||
except Exception as e:
|
||||
# Keystone service must be temporarily busy, retry
|
||||
LOG.error(str(e))
|
||||
m_ks_client.keystone_client.endpoints.create(
|
||||
endpoint["id"],
|
||||
endpoint['admin_endpoint_url'],
|
||||
interface=dccommon_consts.KS_ENDPOINT_ADMIN,
|
||||
region=subcloud.name)
|
||||
|
||||
# Inform orchestrator that subcloud has been added
|
||||
self.dcorch_rpc_client.add_subcloud(
|
||||
context, subcloud.name, subcloud.software_version)
|
||||
|
||||
# create entry into alarm summary table, will get real values later
|
||||
alarm_updates = {'critical_alarms': -1,
|
||||
'major_alarms': -1,
|
||||
'minor_alarms': -1,
|
||||
'warnings': -1,
|
||||
'cloud_status': consts.ALARMS_DISABLED}
|
||||
db_api.subcloud_alarms_create(context, subcloud.name,
|
||||
alarm_updates)
|
||||
|
||||
# Regenerate the addn_hosts_dc file
|
||||
self._create_addn_hosts_dc(context)
|
||||
|
||||
self._populate_payload_with_cached_keystone_data(
|
||||
cached_regionone_data, payload)
|
||||
|
||||
if "install_values" in payload:
|
||||
payload['install_values']['ansible_ssh_pass'] = \
|
||||
payload['sysadmin_password']
|
||||
|
||||
deploy_command = None
|
||||
if "deploy_playbook" in payload:
|
||||
self._prepare_for_deployment(payload, subcloud.name)
|
||||
deploy_command = self.compose_deploy_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload)
|
||||
|
||||
del payload['sysadmin_password']
|
||||
payload['users'] = dict()
|
||||
for user in USERS_TO_REPLICATE:
|
||||
payload['users'][user] = \
|
||||
str(keyring.get_password(
|
||||
user, dccommon_consts.SERVICES_USER_NAME))
|
||||
|
||||
# Create the ansible inventory for the new subcloud
|
||||
utils.create_subcloud_inventory(payload,
|
||||
ansible_subcloud_inventory_file)
|
||||
|
||||
# create subcloud intermediate certificate and pass in keys
|
||||
self._create_intermediate_ca_cert(payload)
|
||||
|
||||
# Write this subclouds overrides to file
|
||||
# NOTE: This file should not be deleted if subcloud add fails
|
||||
# as it is used for debugging
|
||||
self._write_subcloud_ansible_config(cached_regionone_data, payload)
|
||||
|
||||
if migrate_flag:
|
||||
rehome_command = self.compose_rehome_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
subcloud.software_version)
|
||||
apply_thread = threading.Thread(
|
||||
target=self.run_deploy_thread,
|
||||
args=(subcloud, payload, context,
|
||||
None, None, None, rehome_command))
|
||||
|
||||
self.run_deploy_thread(subcloud, payload, context,
|
||||
rehome_command=rehome_command)
|
||||
|
||||
def add_subcloud(self, context, subcloud_id, payload):
|
||||
"""Add subcloud and notify orchestrators.
|
||||
|
||||
:param context: request context object
|
||||
:param subcloud_id: id of the subcloud
|
||||
:param payload: subcloud configuration
|
||||
"""
|
||||
LOG.info(f"Adding subcloud {payload['name']}.")
|
||||
|
||||
rehoming = payload.get('migrate', '').lower() == "true"
|
||||
payload['ansible_ssh_pass'] = payload['sysadmin_password']
|
||||
|
||||
# Create the subcloud
|
||||
subcloud = self.subcloud_deploy_create(context, subcloud_id,
|
||||
payload, rehoming)
|
||||
|
||||
# Return if create failed
|
||||
if rehoming:
|
||||
success_state = consts.DEPLOY_STATE_PRE_REHOME
|
||||
else:
|
||||
install_command = None
|
||||
if "install_values" in payload:
|
||||
install_command = self.compose_install_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
subcloud.software_version)
|
||||
apply_command = self.compose_apply_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
subcloud.software_version)
|
||||
apply_thread = threading.Thread(
|
||||
target=self.run_deploy_thread,
|
||||
args=(subcloud, payload, context,
|
||||
install_command, apply_command, deploy_command))
|
||||
success_state = consts.DEPLOY_STATE_CREATED
|
||||
if subcloud.deploy_status != success_state:
|
||||
return
|
||||
|
||||
apply_thread.start()
|
||||
# Rehome subcloud
|
||||
if rehoming:
|
||||
self.rehome_subcloud(context, subcloud, payload)
|
||||
return
|
||||
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
# Define which deploy phases should be run
|
||||
phases_to_run = []
|
||||
if consts.INSTALL_VALUES in payload:
|
||||
phases_to_run.append(consts.DEPLOY_PHASE_INSTALL)
|
||||
phases_to_run.append(consts.DEPLOY_PHASE_BOOTSTRAP)
|
||||
if consts.DEPLOY_CONFIG in payload:
|
||||
phases_to_run.append(consts.DEPLOY_PHASE_CONFIG)
|
||||
|
||||
except Exception:
|
||||
LOG.exception("Failed to create subcloud %s" % payload['name'])
|
||||
# If we failed to create the subcloud, update the
|
||||
# deployment status
|
||||
if migrate_flag:
|
||||
db_api.subcloud_update(
|
||||
context, subcloud.id,
|
||||
deploy_status=consts.DEPLOY_STATE_REHOME_PREP_FAILED)
|
||||
else:
|
||||
db_api.subcloud_update(
|
||||
context, subcloud.id,
|
||||
deploy_status=consts.DEPLOY_STATE_DEPLOY_PREP_FAILED)
|
||||
# Finish adding the subcloud by running the deploy phases
|
||||
succeeded = self.run_deploy_phases(
|
||||
context, subcloud_id, payload, phases_to_run)
|
||||
|
||||
if succeeded:
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud_id, deploy_status=consts.DEPLOY_STATE_DONE)
|
||||
|
||||
LOG.info(f"Finished adding subcloud {subcloud['name']}.")
|
||||
|
||||
def reconfigure_subcloud(self, context, subcloud_id, payload):
|
||||
"""Reconfigure subcloud
|
||||
|
||||
:param context: request context object
|
||||
:param subcloud_id: id of the subcloud
|
||||
:param payload: subcloud configuration
|
||||
"""
|
||||
LOG.info("Reconfiguring subcloud %s." % subcloud_id)
|
||||
@ -542,10 +408,10 @@ class SubcloudManager(manager.Manager):
|
||||
ansible_subcloud_inventory_file = self._get_ansible_filename(
|
||||
subcloud.name, INVENTORY_FILE_POSTFIX)
|
||||
|
||||
deploy_command = None
|
||||
config_command = None
|
||||
if "deploy_playbook" in payload:
|
||||
self._prepare_for_deployment(payload, subcloud.name)
|
||||
deploy_command = self.compose_deploy_command(
|
||||
config_command = self.compose_config_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload)
|
||||
@ -553,7 +419,7 @@ class SubcloudManager(manager.Manager):
|
||||
del payload['sysadmin_password']
|
||||
apply_thread = threading.Thread(
|
||||
target=self.run_deploy_thread,
|
||||
args=(subcloud, payload, context, None, None, deploy_command))
|
||||
args=(subcloud, payload, context, None, None, config_command))
|
||||
apply_thread.start()
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
except Exception:
|
||||
@ -595,10 +461,10 @@ class SubcloudManager(manager.Manager):
|
||||
payload['bootstrap-address'] = \
|
||||
payload['install_values']['bootstrap_address']
|
||||
|
||||
deploy_command = None
|
||||
config_command = None
|
||||
if "deploy_playbook" in payload:
|
||||
self._prepare_for_deployment(payload, subcloud.name)
|
||||
deploy_command = self.compose_deploy_command(
|
||||
config_command = self.compose_config_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload)
|
||||
@ -621,7 +487,7 @@ class SubcloudManager(manager.Manager):
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload['software_version'])
|
||||
apply_command = self.compose_apply_command(
|
||||
bootstrap_command = self.compose_bootstrap_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload['software_version'])
|
||||
@ -629,7 +495,7 @@ class SubcloudManager(manager.Manager):
|
||||
apply_thread = threading.Thread(
|
||||
target=self.run_deploy_thread,
|
||||
args=(subcloud, payload, context,
|
||||
install_command, apply_command, deploy_command,
|
||||
install_command, bootstrap_command, config_command,
|
||||
None, network_reconfig))
|
||||
apply_thread.start()
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
@ -762,12 +628,21 @@ class SubcloudManager(manager.Manager):
|
||||
|
||||
def _deploy_bootstrap_prep(self, context, subcloud, payload: dict,
|
||||
ansible_subcloud_inventory_file):
|
||||
"""Run the preparation steps needed to run the bootstrap operation
|
||||
|
||||
:param context: target request context object
|
||||
:param subcloud: subcloud model object
|
||||
:param payload: bootstrap request parameters
|
||||
:param ansible_subcloud_inventory_file: the ansible inventory file path
|
||||
:return: ansible command needed to run the bootstrap playbook
|
||||
"""
|
||||
management_subnet = utils.get_management_subnet(payload)
|
||||
sys_controller_gw_ip = payload.get(
|
||||
"systemcontroller_gateway_address")
|
||||
|
||||
if (management_subnet != subcloud.management_subnet) or (
|
||||
sys_controller_gw_ip != subcloud.systemcontroller_gateway_ip):
|
||||
sys_controller_gw_ip != subcloud.systemcontroller_gateway_ip
|
||||
):
|
||||
m_ks_client = OpenStackDriver(
|
||||
region_name=dccommon_consts.DEFAULT_REGION_NAME,
|
||||
region_clients=None).keystone_client
|
||||
@ -814,23 +689,37 @@ class SubcloudManager(manager.Manager):
|
||||
utils.create_subcloud_inventory(payload,
|
||||
ansible_subcloud_inventory_file)
|
||||
|
||||
apply_command = self.compose_apply_command(
|
||||
bootstrap_command = self.compose_bootstrap_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
subcloud.software_version)
|
||||
return apply_command
|
||||
return bootstrap_command
|
||||
|
||||
def _deploy_config_prep(self, subcloud, payload: dict,
|
||||
ansible_subcloud_inventory_file):
|
||||
"""Run the preparation steps needed to run the config operation
|
||||
|
||||
:param subcloud: target subcloud model object
|
||||
:param payload: config request parameters
|
||||
:param ansible_subcloud_inventory_file: the ansible inventory file path
|
||||
:return: ansible command needed to run the config playbook
|
||||
"""
|
||||
self._prepare_for_deployment(payload, subcloud.name)
|
||||
deploy_command = self.compose_deploy_command(
|
||||
config_command = self.compose_config_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload)
|
||||
return deploy_command
|
||||
return config_command
|
||||
|
||||
def _deploy_install_prep(self, subcloud, payload: dict,
|
||||
ansible_subcloud_inventory_file):
|
||||
"""Run the preparation steps needed to run the install operation
|
||||
|
||||
:param subcloud: target subcloud model object
|
||||
:param payload: install request parameters
|
||||
:param ansible_subcloud_inventory_file: the ansible inventory file path
|
||||
:return: ansible command needed to run the install playbook
|
||||
"""
|
||||
payload['install_values']['ansible_ssh_pass'] = \
|
||||
payload['sysadmin_password']
|
||||
payload['install_values']['ansible_become_pass'] = \
|
||||
@ -919,18 +808,25 @@ class SubcloudManager(manager.Manager):
|
||||
self.run_deploy_phases(context, subcloud_id, payload,
|
||||
deploy_states_to_run)
|
||||
|
||||
def subcloud_deploy_create(self, context, subcloud_id, payload):
|
||||
def subcloud_deploy_create(self, context, subcloud_id, payload, rehoming=False):
|
||||
"""Create subcloud and notify orchestrators.
|
||||
|
||||
:param context: request context object
|
||||
:param subcloud_id: subcloud_id from db
|
||||
:param payload: subcloud configuration
|
||||
:param rehoming: flag indicating if this is part of a rehoming operation
|
||||
:return: resulting subcloud DB object
|
||||
"""
|
||||
LOG.info("Creating subcloud %s." % payload['name'])
|
||||
|
||||
if rehoming:
|
||||
deploy_state = consts.DEPLOY_STATE_PRE_REHOME
|
||||
else:
|
||||
deploy_state = consts.DEPLOY_STATE_CREATING
|
||||
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud_id,
|
||||
deploy_status=consts.DEPLOY_STATE_CREATING)
|
||||
deploy_status=deploy_state)
|
||||
|
||||
try:
|
||||
# Create a new route to this subcloud on the management interface
|
||||
@ -1022,7 +918,7 @@ class SubcloudManager(manager.Manager):
|
||||
self._prepare_for_deployment(payload, subcloud.name,
|
||||
populate_passwords=False)
|
||||
|
||||
payload['users'] = dict()
|
||||
payload['users'] = {}
|
||||
for user in USERS_TO_REPLICATE:
|
||||
payload['users'][user] = \
|
||||
str(keyring.get_password(
|
||||
@ -1044,26 +940,36 @@ class SubcloudManager(manager.Manager):
|
||||
# as it is used for debugging
|
||||
self._write_subcloud_ansible_config(cached_regionone_data, payload)
|
||||
|
||||
if not rehoming:
|
||||
deploy_state = consts.DEPLOY_STATE_CREATED
|
||||
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud_id,
|
||||
deploy_status=consts.DEPLOY_STATE_CREATED)
|
||||
deploy_status=deploy_state)
|
||||
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
return subcloud
|
||||
|
||||
except Exception:
|
||||
LOG.exception("Failed to create subcloud %s" % payload['name'])
|
||||
# If we failed to create the subcloud, update the deployment status
|
||||
|
||||
if rehoming:
|
||||
deploy_state = consts.DEPLOY_STATE_REHOME_PREP_FAILED
|
||||
else:
|
||||
deploy_state = consts.DEPLOY_STATE_CREATE_FAILED
|
||||
|
||||
subcloud = db_api.subcloud_update(
|
||||
context, subcloud.id,
|
||||
deploy_status=consts.DEPLOY_STATE_CREATE_FAILED)
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
deploy_status=deploy_state)
|
||||
return subcloud
|
||||
|
||||
def subcloud_deploy_install(self, context, subcloud_id, payload: dict):
|
||||
def subcloud_deploy_install(self, context, subcloud_id, payload: dict) -> bool:
|
||||
"""Install subcloud
|
||||
|
||||
:param context: request context object
|
||||
:param subcloud_id: subcloud id from db
|
||||
:param payload: subcloud Install
|
||||
:return: success status
|
||||
"""
|
||||
|
||||
# Retrieve the subcloud details from the database
|
||||
@ -1112,6 +1018,7 @@ class SubcloudManager(manager.Manager):
|
||||
:param context: request context object
|
||||
:param subcloud_id: subcloud_id from db
|
||||
:param payload: subcloud bootstrap configuration
|
||||
:return: success status
|
||||
"""
|
||||
LOG.info("Bootstrapping subcloud %s." % payload['name'])
|
||||
|
||||
@ -1126,11 +1033,11 @@ class SubcloudManager(manager.Manager):
|
||||
ansible_subcloud_inventory_file = self._get_ansible_filename(
|
||||
subcloud.name, INVENTORY_FILE_POSTFIX)
|
||||
|
||||
apply_command = self._deploy_bootstrap_prep(
|
||||
bootstrap_command = self._deploy_bootstrap_prep(
|
||||
context, subcloud, payload,
|
||||
ansible_subcloud_inventory_file)
|
||||
bootstrap_success = self._run_subcloud_bootstrap(
|
||||
context, subcloud, apply_command, log_file)
|
||||
context, subcloud, bootstrap_command, log_file)
|
||||
return bootstrap_success
|
||||
|
||||
except Exception:
|
||||
@ -1140,12 +1047,13 @@ class SubcloudManager(manager.Manager):
|
||||
deploy_status=consts.DEPLOY_STATE_PRE_BOOTSTRAP_FAILED)
|
||||
return False
|
||||
|
||||
def subcloud_deploy_config(self, context, subcloud_id, payload: dict) -> dict:
|
||||
def subcloud_deploy_config(self, context, subcloud_id, payload: dict) -> bool:
|
||||
"""Configure subcloud
|
||||
|
||||
:param context: request context object
|
||||
:param subcloud_id: subcloud_id from db
|
||||
:param payload: subcloud configuration
|
||||
:return: success status
|
||||
"""
|
||||
LOG.info("Configuring subcloud %s." % subcloud_id)
|
||||
|
||||
@ -1162,13 +1070,13 @@ class SubcloudManager(manager.Manager):
|
||||
subcloud.name, INVENTORY_FILE_POSTFIX)
|
||||
|
||||
self._prepare_for_deployment(payload, subcloud.name)
|
||||
deploy_command = self.compose_deploy_command(
|
||||
config_command = self.compose_config_command(
|
||||
subcloud.name,
|
||||
ansible_subcloud_inventory_file,
|
||||
payload)
|
||||
|
||||
config_success = self._run_subcloud_config(subcloud, context,
|
||||
deploy_command, log_file)
|
||||
config_command, log_file)
|
||||
return config_success
|
||||
|
||||
except Exception:
|
||||
@ -1697,21 +1605,21 @@ class SubcloudManager(manager.Manager):
|
||||
LOG.exception(e)
|
||||
|
||||
def run_deploy_thread(self, subcloud, payload, context,
|
||||
install_command=None, apply_command=None,
|
||||
deploy_command=None, rehome_command=None,
|
||||
install_command=None, bootstrap_command=None,
|
||||
config_command=None, rehome_command=None,
|
||||
network_reconfig=None):
|
||||
try:
|
||||
self._run_deploy(subcloud, payload, context,
|
||||
install_command, apply_command,
|
||||
deploy_command, rehome_command,
|
||||
install_command, bootstrap_command,
|
||||
config_command, rehome_command,
|
||||
network_reconfig)
|
||||
except Exception as ex:
|
||||
LOG.exception("run_deploy failed")
|
||||
raise ex
|
||||
|
||||
def _run_deploy(self, subcloud, payload, context,
|
||||
install_command, apply_command,
|
||||
deploy_command, rehome_command,
|
||||
install_command, bootstrap_command,
|
||||
config_command, rehome_command,
|
||||
network_reconfig):
|
||||
log_file = (
|
||||
os.path.join(consts.DC_ANSIBLE_LOG_DIR, subcloud.name)
|
||||
@ -1724,7 +1632,7 @@ class SubcloudManager(manager.Manager):
|
||||
)
|
||||
if not install_success:
|
||||
return
|
||||
if apply_command:
|
||||
if bootstrap_command:
|
||||
try:
|
||||
# Update the subcloud to bootstrapping
|
||||
db_api.subcloud_update(
|
||||
@ -1739,7 +1647,7 @@ class SubcloudManager(manager.Manager):
|
||||
# Run the ansible boostrap-subcloud playbook
|
||||
LOG.info("Starting bootstrap of %s" % subcloud.name)
|
||||
try:
|
||||
run_playbook(log_file, apply_command)
|
||||
run_playbook(log_file, bootstrap_command)
|
||||
except PlaybookExecutionFailed:
|
||||
msg = utils.find_ansible_error_msg(
|
||||
subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING)
|
||||
@ -1750,7 +1658,7 @@ class SubcloudManager(manager.Manager):
|
||||
error_description=msg[0:consts.ERROR_DESCRIPTION_LENGTH])
|
||||
return
|
||||
LOG.info("Successfully bootstrapped %s" % subcloud.name)
|
||||
if deploy_command:
|
||||
if config_command:
|
||||
# Run the custom deploy playbook
|
||||
LOG.info("Starting deploy of %s" % subcloud.name)
|
||||
db_api.subcloud_update(
|
||||
@ -1759,7 +1667,7 @@ class SubcloudManager(manager.Manager):
|
||||
error_description=consts.ERROR_DESC_EMPTY)
|
||||
|
||||
try:
|
||||
run_playbook(log_file, deploy_command)
|
||||
run_playbook(log_file, config_command)
|
||||
except PlaybookExecutionFailed:
|
||||
msg = utils.find_ansible_error_msg(
|
||||
subcloud.name, log_file, consts.DEPLOY_STATE_DEPLOYING)
|
||||
@@ -1816,32 +1724,33 @@ class SubcloudManager(manager.Manager):
error_description=consts.ERROR_DESC_EMPTY)

def run_deploy_phases(self, context, subcloud_id, payload,
deploy_states_to_run):
"""Run individual phases durring deploy operation."""
deploy_phases_to_run):
"""Run one or more deployment phases, ensuring correct order

:param context: request context object
:param subcloud_id: subcloud id from db
:param payload: deploy phases payload
:param deploy_phases_to_run: deploy phases that should run
"""
try:
for state in deploy_states_to_run:
if state == consts.DEPLOY_PHASE_INSTALL:
install_success = self.subcloud_deploy_install(
succeeded = True
if consts.DEPLOY_PHASE_INSTALL in deploy_phases_to_run:
succeeded = self.subcloud_deploy_install(
context, subcloud_id, payload)
if not install_success:
return
elif state == consts.DEPLOY_PHASE_BOOTSTRAP:
bootstrap_success = self.subcloud_deploy_bootstrap(
if succeeded and consts.DEPLOY_PHASE_BOOTSTRAP in deploy_phases_to_run:
succeeded = self.subcloud_deploy_bootstrap(
context, subcloud_id, payload)
if not bootstrap_success:
return
elif state == consts.DEPLOY_PHASE_CONFIG:
config_success = self.subcloud_deploy_config(
if succeeded and consts.DEPLOY_PHASE_CONFIG in deploy_phases_to_run:
succeeded = self.subcloud_deploy_config(
context, subcloud_id, payload)
if not config_success:
return
return succeeded

except Exception as ex:
LOG.exception("run_deploy failed")
LOG.exception("run_deploy_phases failed")
raise ex

def _run_subcloud_config(self, subcloud, context,
deploy_command, log_file):
config_command, log_file):
# Run the custom deploy playbook
LOG.info("Starting deploy of %s" % subcloud.name)
db_api.subcloud_update(
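run_deploy_phases is now the single driver for the install/bootstrap/config sequence and stops at the first phase that fails. For context, this is roughly how the reworked add_subcloud elsewhere in this commit selects and runs the phases (condensed; error handling omitted):

    phases_to_run = []
    if consts.INSTALL_VALUES in payload:
        phases_to_run.append(consts.DEPLOY_PHASE_INSTALL)
    phases_to_run.append(consts.DEPLOY_PHASE_BOOTSTRAP)
    if consts.DEPLOY_CONFIG in payload:
        phases_to_run.append(consts.DEPLOY_PHASE_CONFIG)

    succeeded = self.run_deploy_phases(
        context, subcloud_id, payload, phases_to_run)
    if succeeded:
        db_api.subcloud_update(
            context, subcloud_id, deploy_status=consts.DEPLOY_STATE_DONE)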
@ -1852,7 +1761,7 @@ class SubcloudManager(manager.Manager):
|
||||
try:
|
||||
run_ansible = RunAnsible()
|
||||
aborted = run_ansible.exec_playbook(
|
||||
log_file, deploy_command, subcloud.name)
|
||||
log_file, config_command, subcloud.name)
|
||||
except PlaybookExecutionFailed:
|
||||
msg = utils.find_ansible_error_msg(
|
||||
subcloud.name, log_file, consts.DEPLOY_STATE_CONFIGURING)
|
||||
@ -1921,7 +1830,7 @@ class SubcloudManager(manager.Manager):
|
||||
return True
|
||||
|
||||
def _run_subcloud_bootstrap(self, context, subcloud,
|
||||
apply_command, log_file):
|
||||
bootstrap_command, log_file):
|
||||
# Update the subcloud deploy_status to bootstrapping
|
||||
db_api.subcloud_update(
|
||||
context, subcloud.id,
|
||||
@ -1933,7 +1842,7 @@ class SubcloudManager(manager.Manager):
|
||||
try:
|
||||
run_ansible = RunAnsible()
|
||||
aborted = run_ansible.exec_playbook(
|
||||
log_file, apply_command, subcloud.name)
|
||||
log_file, bootstrap_command, subcloud.name)
|
||||
except PlaybookExecutionFailed:
|
||||
msg = utils.find_ansible_error_msg(
|
||||
subcloud.name, log_file, consts.DEPLOY_STATE_BOOTSTRAPPING)
|
||||
|
@@ -124,8 +124,9 @@ class ManagerClient(RPCClient):
consts.TOPIC_DC_MANAGER,
self.BASE_RPC_API_VERSION)

def add_subcloud(self, ctxt, payload):
def add_subcloud(self, ctxt, subcloud_id, payload):
return self.cast(ctxt, self.make_msg('add_subcloud',
subcloud_id=subcloud_id,
payload=payload))

def delete_subcloud(self, ctxt, subcloud_id):
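The add_subcloud cast now carries the subcloud id, so the API layer is expected to create the database record before calling the manager. The subclouds controller diff is suppressed above, so the call order below is an inference from the new signature rather than a quote from this change:

    # Assumed API-side sequence: persist the subcloud first, then hand its
    # id to dcmanager-manager over RPC.
    subcloud = psd_common.add_subcloud_to_database(context, payload)
    self.dcmanager_rpc_client.add_subcloud(context, subcloud.id, payload)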
@ -42,7 +42,7 @@ FAKE_SUBCLOUD_INSTALL_VALUES = fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES
|
||||
class FakeRPCClient(object):
|
||||
def subcloud_deploy_create(self, context, subcloud_id, _):
|
||||
subcloud = db_api.subcloud_get(context, subcloud_id)
|
||||
return db_api.subcloud_db_model_to_dict(subcloud)
|
||||
return subcloud
|
||||
|
||||
|
||||
# Apply the TestSubcloudPost parameter validation tests to the subcloud deploy
File diff suppressed because it is too large
@ -78,9 +78,9 @@ class TestDCManagerService(base.DCManagerTestCase):
|
||||
def test_add_subcloud(self, mock_subcloud_manager):
|
||||
self.service_obj.init_managers()
|
||||
self.service_obj.add_subcloud(
|
||||
self.context, payload={'name': 'testname'})
|
||||
self.context, subcloud_id=1, payload={'name': 'testname'})
|
||||
mock_subcloud_manager().add_subcloud.\
|
||||
assert_called_once_with(self.context, mock.ANY)
|
||||
assert_called_once_with(self.context, 1, mock.ANY)
|
||||
|
||||
@mock.patch.object(service, 'SubcloudManager')
|
||||
def test_delete_subcloud(self, mock_subcloud_manager):
|
||||
|
@ -12,6 +12,7 @@
|
||||
# under the License.
|
||||
#
|
||||
import base64
|
||||
import collections
|
||||
import copy
|
||||
import datetime
|
||||
|
||||
@ -28,6 +29,7 @@ sys.modules['fm_core'] = mock.Mock()
|
||||
import threading
|
||||
|
||||
from dccommon import consts as dccommon_consts
|
||||
from dccommon import subcloud_install
|
||||
from dccommon.utils import RunAnsible
|
||||
from dcmanager.common import consts
|
||||
from dcmanager.common import exceptions
|
||||
@ -103,6 +105,7 @@ class FakeProject(object):
|
||||
self.name = projname
|
||||
self.id = projid
|
||||
|
||||
|
||||
FAKE_PROJECTS = [
|
||||
FakeProject(
|
||||
dccommon_consts.ADMIN_PROJECT_NAME,
|
||||
@ -397,6 +400,10 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
self.mock_context.get_admin_context.return_value = self.ctx
|
||||
self.addCleanup(p.stop)
|
||||
|
||||
# Reset the regionone_data cache between tests
|
||||
subcloud_manager.SubcloudManager.regionone_data = \
|
||||
collections.defaultdict(dict)
|
||||
|
||||
@staticmethod
|
||||
def create_subcloud_static(ctxt, **kwargs):
|
||||
values = {
|
||||
@ -453,7 +460,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
sm.subcloud_deploy_install(self.ctx, subcloud.id, payload=fake_payload)
|
||||
mock_compose_install_command.assert_called_once_with(
|
||||
subcloud_name,
|
||||
sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
FAKE_PREVIOUS_SW_VERSION)
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
@ -497,7 +504,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA
|
||||
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
subcloud_dict = sm.subcloud_deploy_create(self.ctx, subcloud.id,
|
||||
subcloud = sm.subcloud_deploy_create(self.ctx, subcloud.id,
|
||||
payload=values)
|
||||
mock_get_cached_regionone_data.assert_called_once()
|
||||
mock_sysinv_client().create_route.assert_called()
|
||||
@ -510,7 +517,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
self.assertEqual(consts.DEPLOY_STATE_CREATED,
|
||||
subcloud_dict['deploy-status'])
|
||||
subcloud.deploy_status)
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name'])
|
||||
@ -529,12 +536,12 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
mock_keystone_client.side_effect = FakeException('boom')
|
||||
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
subcloud_dict = sm.subcloud_deploy_create(self.ctx, subcloud.id,
|
||||
subcloud = sm.subcloud_deploy_create(self.ctx, subcloud.id,
|
||||
payload=values)
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
self.assertEqual(consts.DEPLOY_STATE_CREATE_FAILED,
|
||||
subcloud_dict['deploy-status'])
|
||||
subcloud.deploy_status)
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name'])
|
||||
@ -548,7 +555,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
@mock.patch.object(RunAnsible, 'exec_playbook')
|
||||
def test_subcloud_deploy_bootstrap(self, mock_exec_playbook, mock_update_yml,
|
||||
mock_get_playbook_for_software_version,
|
||||
mock_keyring, create_subcloud_inventory):
|
||||
mock_keyring, mock_create_subcloud_inventory):
|
||||
mock_get_playbook_for_software_version.return_value = "22.12"
|
||||
mock_keyring.get_password.return_value = "testpass"
|
||||
mock_exec_playbook.return_value = False
|
||||
@ -675,73 +682,72 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
self.assertEqual(consts.DEPLOY_STATE_DONE,
|
||||
updated_subcloud.deploy_status)
|
||||
|
||||
@mock.patch.object(subcloud_install.SubcloudInstall, 'prep')
|
||||
@mock.patch.object(subcloud_install, 'KeystoneClient')
|
||||
@mock.patch.object(subcloud_install, 'SysinvClient')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'compose_apply_command')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'compose_rehome_command')
|
||||
'_write_subcloud_ansible_config')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'_create_intermediate_ca_cert')
|
||||
@mock.patch.object(cutils, 'delete_subcloud_inventory')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'_create_addn_hosts_dc')
|
||||
@mock.patch.object(subcloud_manager, 'OpenStackDriver')
|
||||
@mock.patch.object(subcloud_manager, 'SysinvClient')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'_get_cached_regionone_data')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'_create_addn_hosts_dc')
|
||||
'_write_deploy_files')
|
||||
@mock.patch.object(cutils, 'create_subcloud_inventory')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'_write_subcloud_ansible_config')
|
||||
@mock.patch.object(subcloud_manager,
|
||||
'keyring')
|
||||
@mock.patch.object(threading.Thread,
|
||||
'start')
|
||||
def test_add_subcloud(self, mock_thread_start, mock_keyring,
|
||||
mock_write_subcloud_ansible_config,
|
||||
mock_create_subcloud_inventory,
|
||||
mock_create_addn_hosts,
|
||||
mock_get_cached_regionone_data,
|
||||
mock_sysinv_client,
|
||||
mock_keystone_client,
|
||||
mock_delete_subcloud_inventory,
|
||||
@mock.patch.object(subcloud_manager, 'keyring')
|
||||
@mock.patch.object(cutils, 'get_playbook_for_software_version')
|
||||
@mock.patch.object(cutils, 'update_values_on_yaml_file')
|
||||
@mock.patch.object(RunAnsible, 'exec_playbook')
|
||||
def test_add_subcloud(self, mock_exec_playbook, mock_update_yml,
|
||||
mock_get_playbook_for_software_version,
|
||||
mock_keyring, mock_create_subcloud_inventory,
|
||||
mock_write_deploy_files, mock_sysinv_client,
|
||||
mock_openstack_driver, mock_create_addn_hosts,
|
||||
mock_create_intermediate_ca_cert,
|
||||
mock_compose_rehome_command,
|
||||
mock_compose_apply_command):
|
||||
values = utils.create_subcloud_dict(base.SUBCLOUD_SAMPLE_DATA_0)
|
||||
values['deploy_status'] = consts.DEPLOY_STATE_NONE
|
||||
mock_write_subcloud_ansible_config,
|
||||
mock_install_ks_client, mock_install_sysinvclient,
|
||||
mock_install_prep):
|
||||
# Prepare the payload
|
||||
install_values = copy.copy(fake_subcloud.FAKE_SUBCLOUD_INSTALL_VALUES)
|
||||
install_values['software_version'] = SW_VERSION
|
||||
payload = {**fake_subcloud.FAKE_BOOTSTRAP_VALUE,
|
||||
**fake_subcloud.FAKE_BOOTSTRAP_FILE_DATA,
|
||||
"sysadmin_password": "testpass",
|
||||
'bmc_password': 'bmc_pass',
|
||||
'install_values': install_values,
|
||||
'software_version': FAKE_PREVIOUS_SW_VERSION,
|
||||
"deploy_playbook": "test_playbook.yaml",
|
||||
"deploy_overrides": "test_overrides.yaml",
|
||||
"deploy_chart": "test_chart.yaml",
|
||||
"deploy_config": "subcloud1.yaml"}
|
||||
|
||||
# dcmanager add_subcloud queries the data from the db
|
||||
subcloud = self.create_subcloud_static(self.ctx, name=values['name'])
|
||||
# Create subcloud in DB
|
||||
subcloud = self.create_subcloud_static(self.ctx, name=payload['name'])
|
||||
|
||||
mock_keystone_client().keystone_client = FakeKeystoneClient()
|
||||
mock_keyring.get_password.return_value = "testpassword"
|
||||
mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA
|
||||
# Mock return values
|
||||
mock_get_playbook_for_software_version.return_value = SW_VERSION
|
||||
mock_keyring.get_password.return_value = payload['sysadmin_password']
|
||||
mock_exec_playbook.return_value = False
|
||||
mock_openstack_driver().keystone_client = FakeKeystoneClient()
|
||||
|
||||
# Call the add method
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
subcloud_dict = sm.add_subcloud(self.ctx, payload=values)
|
||||
mock_get_cached_regionone_data.assert_called_once()
|
||||
mock_sysinv_client().create_route.assert_called()
|
||||
self.fake_dcorch_api.add_subcloud.assert_called_once()
|
||||
mock_create_addn_hosts.assert_called_once()
|
||||
mock_create_subcloud_inventory.assert_called_once()
|
||||
mock_write_subcloud_ansible_config.assert_called_once()
|
||||
mock_keyring.get_password.assert_called()
|
||||
mock_thread_start.assert_called_once()
|
||||
mock_create_intermediate_ca_cert.assert_called_once()
|
||||
mock_compose_rehome_command.assert_not_called()
|
||||
mock_compose_apply_command.assert_called_once_with(
|
||||
values['name'],
|
||||
sm._get_ansible_filename(values['name'], consts.INVENTORY_FILE_POSTFIX),
|
||||
subcloud['software_version'])
|
||||
sm.add_subcloud(self.ctx, subcloud.id, payload)
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
self.assertEqual(consts.DEPLOY_STATE_PRE_DEPLOY,
|
||||
subcloud_dict['deploy-status'])
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name'])
|
||||
self.assertEqual(consts.DEPLOY_STATE_PRE_DEPLOY,
|
||||
# Verify results
|
||||
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, subcloud.name)
|
||||
self.assertEqual(consts.DEPLOY_STATE_DONE,
|
||||
updated_subcloud.deploy_status)
|
||||
|
||||
mock_write_deploy_files.assert_called()
|
||||
mock_keyring.get_password.assert_called()
|
||||
mock_update_yml.assert_called()
|
||||
mock_create_subcloud_inventory.assert_called()
|
||||
mock_get_playbook_for_software_version.assert_called_once()
|
||||
self.assertEqual(mock_exec_playbook.call_count, 3)
|
||||
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
'compose_rehome_command')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager,
|
||||
@ -758,10 +764,8 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
'_write_subcloud_ansible_config')
|
||||
@mock.patch.object(subcloud_manager,
|
||||
'keyring')
|
||||
@mock.patch.object(threading.Thread,
|
||||
'start')
|
||||
def test_add_subcloud_with_migration_option(
|
||||
self, mock_thread_start, mock_keyring,
|
||||
self, mock_keyring,
|
||||
mock_write_subcloud_ansible_config,
|
||||
mock_create_subcloud_inventory,
|
||||
mock_create_addn_hosts,
|
||||
@ -783,24 +787,22 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA
|
||||
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
subcloud_dict = sm.add_subcloud(self.ctx, payload=values)
|
||||
with mock.patch.object(sm, 'run_deploy_thread') as mock_run_deploy:
|
||||
sm.add_subcloud(self.ctx, subcloud.id, payload=values)
|
||||
|
||||
mock_get_cached_regionone_data.assert_called_once()
|
||||
mock_sysinv_client().create_route.assert_called()
|
||||
self.fake_dcorch_api.add_subcloud.assert_called_once()
|
||||
mock_create_addn_hosts.assert_called_once()
|
||||
mock_create_subcloud_inventory.assert_called_once()
|
||||
mock_write_subcloud_ansible_config.assert_called_once()
|
||||
mock_thread_start.assert_called_once()
|
||||
mock_run_deploy.assert_called_once()
|
||||
mock_create_intermediate_ca_cert.assert_called_once()
|
||||
mock_compose_rehome_command.assert_called_once_with(
|
||||
values['name'],
|
||||
sm._get_ansible_filename(values['name'], consts.INVENTORY_FILE_POSTFIX),
|
||||
subcloud['software_version'])
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
self.assertEqual(consts.DEPLOY_STATE_PRE_REHOME,
|
||||
subcloud_dict['deploy-status'])
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
updated_subcloud = db_api.subcloud_get_by_name(self.ctx, values['name'])
|
||||
self.assertEqual(consts.DEPLOY_STATE_PRE_REHOME,
|
||||
@ -809,7 +811,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
@mock.patch.object(subcloud_manager, 'OpenStackDriver')
|
||||
@mock.patch.object(subcloud_manager, 'SysinvClient')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager, '_get_cached_regionone_data')
|
||||
def test_add_subcloud_deploy_prep_failed(self,
|
||||
def test_add_subcloud_create_failed(self,
|
||||
mock_get_cached_regionone_data,
|
||||
mock_sysinv_client,
|
||||
mock_keystone_client):
|
||||
@ -817,20 +819,20 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
services = FAKE_SERVICES
|
||||
|
||||
# dcmanager add_subcloud queries the data from the db
|
||||
self.create_subcloud_static(self.ctx, name=values['name'])
|
||||
subcloud = self.create_subcloud_static(self.ctx, name=values['name'])
|
||||
|
||||
self.fake_dcorch_api.add_subcloud.side_effect = FakeException('boom')
|
||||
mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA
|
||||
mock_keystone_client().services_list = services
|
||||
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
sm.add_subcloud(self.ctx, payload=values)
|
||||
sm.add_subcloud(self.ctx, subcloud.id, payload=values)
|
||||
mock_get_cached_regionone_data.assert_called_once()
|
||||
mock_sysinv_client().create_route.assert_called()
|
||||
|
||||
# Verify subcloud was updated with correct values
|
||||
subcloud = db_api.subcloud_get_by_name(self.ctx, values['name'])
|
||||
self.assertEqual(consts.DEPLOY_STATE_DEPLOY_PREP_FAILED,
|
||||
self.assertEqual(consts.DEPLOY_STATE_CREATE_FAILED,
|
||||
subcloud.deploy_status)
|
||||
|
||||
@mock.patch.object(subcloud_manager, 'OpenStackDriver')
|
||||
@ -843,14 +845,14 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
services = FAKE_SERVICES
|
||||
|
||||
# dcmanager add_subcloud queries the data from the db
|
||||
self.create_subcloud_static(self.ctx, name=values['name'])
|
||||
subcloud = self.create_subcloud_static(self.ctx, name=values['name'])
|
||||
|
||||
self.fake_dcorch_api.add_subcloud.side_effect = FakeException('boom')
|
||||
mock_get_cached_regionone_data.return_value = FAKE_CACHED_REGIONONE_DATA
|
||||
mock_keystone_client().services_list = services
|
||||
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
sm.add_subcloud(self.ctx, payload=values)
|
||||
sm.add_subcloud(self.ctx, subcloud.id, payload=values)
|
||||
mock_get_cached_regionone_data.assert_called_once()
|
||||
mock_sysinv_client().create_route.assert_called()
|
||||
|
||||
@ -1676,8 +1678,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
mock_prepare_for_deployment.assert_called_once()
|
||||
|
||||
def test_get_ansible_filename(self):
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
filename = sm._get_ansible_filename('subcloud1',
|
||||
filename = cutils.get_ansible_filename('subcloud1',
|
||||
consts.INVENTORY_FILE_POSTFIX)
|
||||
self.assertEqual(filename,
|
||||
f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml')
|
||||
@ -1701,15 +1702,15 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
)
|
||||
|
||||
@mock.patch('os.path.isfile')
|
||||
def test_compose_apply_command(self, mock_isfile):
|
||||
def test_compose_bootstrap_command(self, mock_isfile):
|
||||
mock_isfile.return_value = True
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
apply_command = sm.compose_apply_command(
|
||||
bootstrap_command = sm.compose_bootstrap_command(
|
||||
'subcloud1',
|
||||
f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml',
|
||||
FAKE_PREVIOUS_SW_VERSION)
|
||||
self.assertEqual(
|
||||
apply_command,
|
||||
bootstrap_command,
|
||||
[
|
||||
'ansible-playbook',
|
||||
cutils.get_playbook_for_software_version(
|
||||
@ -1722,19 +1723,19 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
]
|
||||
)
|
||||
|
||||
def test_compose_deploy_command(self):
|
||||
def test_compose_config_command(self):
|
||||
sm = subcloud_manager.SubcloudManager()
|
||||
fake_payload = {"sysadmin_password": "testpass",
|
||||
"deploy_playbook": "test_playbook.yaml",
|
||||
"deploy_overrides": "test_overrides.yaml",
|
||||
"deploy_chart": "test_chart.yaml",
|
||||
"deploy_config": "subcloud1.yaml"}
|
||||
deploy_command = sm.compose_deploy_command(
|
||||
config_command = sm.compose_config_command(
|
||||
'subcloud1',
|
||||
f'{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_inventory.yml',
|
||||
fake_payload)
|
||||
self.assertEqual(
|
||||
deploy_command,
|
||||
config_command,
|
||||
[
|
||||
'ansible-playbook', 'test_playbook.yaml', '-e',
|
||||
f'@{dccommon_consts.ANSIBLE_OVERRIDES_PATH}/subcloud1_deploy_values.yml', '-i',
|
||||
@ -1773,7 +1774,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
@mock.patch.object(
|
||||
subcloud_manager.SubcloudManager, 'compose_install_command')
|
||||
@mock.patch.object(
|
||||
subcloud_manager.SubcloudManager, 'compose_apply_command')
|
||||
subcloud_manager.SubcloudManager, 'compose_bootstrap_command')
|
||||
@mock.patch.object(cutils, 'create_subcloud_inventory')
|
||||
@mock.patch.object(subcloud_manager.SubcloudManager, '_get_cached_regionone_data')
|
||||
@mock.patch.object(subcloud_manager, 'OpenStackDriver')
|
||||
@ -1782,7 +1783,7 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
def test_reinstall_subcloud(
|
||||
self, mock_keyring, mock_thread_start,
|
||||
mock_keystone_client, mock_get_cached_regionone_data, mock_create_subcloud_inventory,
|
||||
mock_compose_apply_command, mock_compose_install_command,
|
||||
mock_compose_bootstrap_command, mock_compose_install_command,
|
||||
mock_create_intermediate_ca_cert, mock_write_subcloud_ansible_config):
|
||||
|
||||
subcloud_name = 'subcloud1'
|
||||
@ -1812,11 +1813,11 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
mock_write_subcloud_ansible_config.assert_called_once()
|
||||
mock_compose_install_command.assert_called_once_with(
|
||||
subcloud_name,
|
||||
sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
FAKE_PREVIOUS_SW_VERSION)
|
||||
mock_compose_apply_command.assert_called_once_with(
|
||||
mock_compose_bootstrap_command.assert_called_once_with(
|
||||
subcloud_name,
|
||||
sm._get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
cutils.get_ansible_filename(subcloud_name, consts.INVENTORY_FILE_POSTFIX),
|
||||
FAKE_PREVIOUS_SW_VERSION)
|
||||
mock_thread_start.assert_called_once()
|
||||
|
||||
@ -2336,13 +2337,10 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
self.assertEqual(mock_run_ansible.call_count, 2)
|
||||
# Verify the "image_list_file" was passed to the prestage image playbook
|
||||
# for the remote prestage
|
||||
self.assertTrue(
|
||||
'image_list_file' in mock_run_ansible.call_args_list[1].args[1][5])
|
||||
self.assertIn('image_list_file', mock_run_ansible.call_args_list[1].args[1][5])
|
||||
# Verify the prestage request release was passed to the playbooks
|
||||
self.assertTrue(
|
||||
FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5])
|
||||
self.assertTrue(
|
||||
FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5])
|
||||
self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[0].args[1][5])
|
||||
self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[1].args[1][5])
|
||||
|
||||
@mock.patch.object(os_path, 'isdir')
|
||||
@mock.patch.object(cutils, 'get_filename_by_prefix')
|
||||
@ -2439,10 +2437,8 @@ class TestSubcloudManager(base.DCManagerTestCase):
|
||||
self.assertTrue(
|
||||
'image_list_file' not in mock_run_ansible.call_args_list[1].args[1][5])
|
||||
# Verify the prestage request release was passed to the playbooks
|
||||
self.assertTrue(
|
||||
FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[0].args[1][5])
|
||||
self.assertTrue(
|
||||
FAKE_PRESTAGE_RELEASE in mock_run_ansible.call_args_list[1].args[1][5])
|
||||
self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[0].args[1][5])
|
||||
self.assertIn(FAKE_PRESTAGE_RELEASE, mock_run_ansible.call_args_list[1].args[1][5])
|
||||
|
||||
@mock.patch.object(prestage, 'prestage_images')
|
||||
@mock.patch.object(prestage, 'prestage_packages')
|
||||