Merge "Rework evaluating apps reapply"

This commit is contained in:
Zuul 2021-02-26 17:47:28 +00:00 committed by Gerrit Code Review
commit 274f9adce9
12 changed files with 486 additions and 68 deletions

View File

@ -16,7 +16,7 @@
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2020 Wind River Systems, Inc.
# Copyright (c) 2013-2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
@ -2068,17 +2068,6 @@ class HostController(rest.RestController):
ihost_ret = pecan.request.rpcapi.configure_ihost(
pecan.request.context, ihost_obj)
# Trigger a system app reapply if the host has been unlocked.
# Only trigger the reapply if it is not during restore and the
# openstack app is applied
if (cutils.is_openstack_applied(pecan.request.dbapi) and
not os.path.isfile(tsc.RESTORE_IN_PROGRESS_FLAG) and
patched_ihost.get('action') in
[constants.UNLOCK_ACTION, constants.FORCE_UNLOCK_ACTION]):
pecan.request.rpcapi.evaluate_app_reapply(
pecan.request.context,
constants.HELM_APP_OPENSTACK)
pecan.request.dbapi.ihost_update(
ihost_obj['uuid'], {'capabilities': ihost_obj['capabilities']})
@ -2087,6 +2076,18 @@ class HostController(rest.RestController):
hostupdate.notify_mtce = True
# Evaluate app reapply on lock/unlock/swact/reinstall
if (not os.path.isfile(tsc.RESTORE_IN_PROGRESS_FLAG) and
patched_ihost.get('action') in
[constants.LOCK_ACTION, constants.FORCE_LOCK_ACTION,
constants.UNLOCK_ACTION, constants.FORCE_UNLOCK_ACTION,
constants.SWACT_ACTION, constants.FORCE_SWACT_ACTION,
constants.REINSTALL_ACTION]):
pecan.request.rpcapi.evaluate_apps_reapply(
pecan.request.context,
trigger={'type': patched_ihost.get('action'),
'configure_required': True if hostupdate.configure_required else False})
pecan.request.dbapi.ihost_update(ihost_obj['uuid'],
{'capabilities': ihost_obj['capabilities']})
@ -2120,6 +2121,11 @@ class HostController(rest.RestController):
new_ihost_mtc['action'] = constants.UNLOCK_ACTION
if new_ihost_mtc['operation'] == 'add':
# Evaluate apps reapply on new host
pecan.request.rpcapi.evaluate_apps_reapply(
pecan.request.context,
trigger={'type': constants.APP_EVALUATE_REAPPLY_TYPE_HOST_ADD})
mtc_response = mtce_api.host_add(
self._api_token, self._mtc_address, self._mtc_port,
new_ihost_mtc,
@ -2529,19 +2535,12 @@ class HostController(rest.RestController):
pecan.request.dbapi.ihost_destroy(ihost_id)
# Check if platform apps need to be reapplied
if personality == constants.CONTROLLER:
for app_name in constants.HELM_APPS_PLATFORM_MANAGED:
if cutils.is_app_applied(pecan.request.dbapi, app_name):
pecan.request.rpcapi.evaluate_app_reapply(
pecan.request.context, app_name)
# If the host being removed was an openstack worker node, check to see
# if a reapply is needed
if openstack_worker and cutils.is_app_applied(
pecan.request.dbapi, constants.HELM_APP_OPENSTACK):
pecan.request.rpcapi.evaluate_app_reapply(
pecan.request.context, constants.HELM_APP_OPENSTACK)
# Check if platform apps need to be reapplied after host delete
pecan.request.rpcapi.evaluate_apps_reapply(
pecan.request.context,
trigger={'type': constants.APP_EVALUATE_REAPPLY_TYPE_HOST_DELETE,
'openstack_worker': True if openstack_worker else False,
'personality': personality})
def _notify_mtce_host_delete(self, ihost):

View File

@ -16,7 +16,7 @@
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2017 Wind River Systems, Inc.
# Copyright (c) 2013-2021 Wind River Systems, Inc.
#
import jsonpatch
@ -518,10 +518,12 @@ class SystemController(rest.RestController):
capabilities = {}
distributed_cloud_role = ""
security_feature = ""
delta_fields = {}
for field in objects.system.fields:
if rpc_isystem[field] != patched_system[field]:
rpc_isystem[field] = patched_system[field]
delta_fields[field] = patched_system[field]
if field == 'name':
name = rpc_isystem[field]
if field == 'contact':
@ -543,6 +545,11 @@ class SystemController(rest.RestController):
delta_handle = list(delta)
rpc_isystem.save()
pecan.request.rpcapi.evaluate_apps_reapply(
pecan.request.context,
trigger={'type': constants.APP_EVALUATE_REAPPLY_TYPE_SYSTEM_MODIFY,
'delta_fields': delta_fields})
if name:
LOG.info("update system name")
pecan.request.rpcapi.configure_isystemname(pecan.request.context,

View File

@ -1,6 +1,6 @@
#!/usr/bin/env python
#
# Copyright (c) 2018 Wind River Systems, Inc.
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
@ -36,7 +36,7 @@ def create_app_overrides_action(path, app_name=None, namespace=None):
return
helm_operator = helm.HelmOperator(dbapi=dbapi)
app_operator = kube_app.AppOperator(dbapi, helm_operator)
app_operator = kube_app.AppOperator(dbapi, helm_operator, {})
if not app_operator.app_has_system_plugins(db_app):
LOG.info("Overrides generation for application %s is "
@ -62,7 +62,7 @@ def create_armada_app_overrides_action(path, app_name=None, namespace=None):
return
helm_operator = helm.HelmOperator(dbapi=dbapi)
app_operator = kube_app.AppOperator(dbapi, helm_operator)
app_operator = kube_app.AppOperator(dbapi, helm_operator, {})
if not app_operator.app_has_system_plugins(db_app):
LOG.info("Overrides generation for application %s is "

View File

@ -1503,14 +1503,6 @@ HELM_APPS_PLATFORM_MANAGED = [
HELM_APP_ROOK_CEPH,
]
# The order in which apps are listed here is important.
# They will be applied as listed below: first platform-integ-apps
# then stx-openstack.
HELM_APPS_WITH_REAPPLY_SUPPORT = \
HELM_APPS_PLATFORM_MANAGED + [
HELM_APP_OPENSTACK
]
HELM_APP_ISO_INSTALL_PATH = '/usr/local/share/applications/helm'
# RBD Provisioner Ceph backend capabilities fields
@ -1558,6 +1550,7 @@ APP_DELETE_OP = 'delete'
APP_UPDATE_OP = 'update'
APP_ROLLBACK_OP = 'rollback'
APP_ABORT_OP = 'abort'
APP_EVALUATE_REAPPLY_OP = 'evaluate-reapply'
# Lifecycle constants
APP_LIFECYCLE_TIMING_PRE = 'pre'
@ -1580,6 +1573,56 @@ APP_LIFECYCLE_MODE_AUTO = 'auto'
# Application metadata constants
APP_METADATA_MAINTAIN_USER_OVERRIDES = 'maintain_user_overrides'
APP_METADATA_HELM_TOOLKIT_REQUIRED = 'helm_toolkit_required'
# Keys used when parsing an application's metadata.yaml and when storing
# the parsed documents in the conductor's apps_metadata dictionary.
APP_METADATA_APPS = 'apps'
APP_METADATA_BEHAVIOR = 'behavior'
APP_METADATA_EVALUATE_REAPPLY = 'evaluate_reapply'
APP_METADATA_AFTER = 'after'
APP_METADATA_TRIGGERS = 'triggers'
APP_METADATA_TYPE = 'type'
APP_METADATA_FILTERS = 'filters'
APP_METADATA_FILTER_FIELD = 'filter_field'
APP_METADATA_PLATFORM_MANAGED_APP = 'platform_managed_app'
APP_METADATA_DESIRED_STATE = 'desired_state'

# Trigger types passed to evaluate_apps_reapply in the 'type' field of the
# trigger dictionary. Host action triggers reuse the corresponding host
# action constants so callers can forward patched_ihost.get('action')
# directly.
APP_EVALUATE_REAPPLY_TYPE_HOST_ADD = 'host-add'
APP_EVALUATE_REAPPLY_TYPE_HOST_DELETE = 'host-delete'
APP_EVALUATE_REAPPLY_TYPE_HOST_REINSTALL = REINSTALL_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_LOCK = LOCK_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_LOCK = FORCE_LOCK_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_UNLOCK = UNLOCK_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_UNLOCK = FORCE_UNLOCK_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_SWACT = SWACT_ACTION
APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_SWACT = FORCE_SWACT_ACTION
APP_EVALUATE_REAPPLY_TYPE_RUNTIME_APPLY_PUPPET = 'runtime-apply-puppet'
APP_EVALUATE_REAPPLY_HOST_AVAILABILITY = 'host-availability-updated'
APP_EVALUATE_REAPPLY_TYPE_SYSTEM_MODIFY = 'system-modify'

# Maps a runtime trigger type (key) to the trigger type string an app may
# subscribe to in its metadata (value). Only trigger types present here can
# cause a reapply evaluation.
APP_EVALUATE_REAPPLY_TRIGGER_TO_METADATA_MAP = {
    UNLOCK_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_UNLOCK,
    FORCE_UNLOCK_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_UNLOCK,
    LOCK_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_LOCK,
    FORCE_LOCK_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_LOCK,
    SWACT_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_SWACT,
    FORCE_SWACT_ACTION:
        APP_EVALUATE_REAPPLY_TYPE_HOST_FORCE_SWACT,
    APP_EVALUATE_REAPPLY_TYPE_RUNTIME_APPLY_PUPPET:
        APP_EVALUATE_REAPPLY_TYPE_RUNTIME_APPLY_PUPPET,
    APP_EVALUATE_REAPPLY_HOST_AVAILABILITY:
        APP_EVALUATE_REAPPLY_HOST_AVAILABILITY,
    APP_EVALUATE_REAPPLY_TYPE_HOST_ADD:
        APP_EVALUATE_REAPPLY_TYPE_HOST_ADD,
    APP_EVALUATE_REAPPLY_TYPE_HOST_REINSTALL:
        APP_EVALUATE_REAPPLY_TYPE_HOST_REINSTALL,
    APP_EVALUATE_REAPPLY_TYPE_HOST_DELETE:
        APP_EVALUATE_REAPPLY_TYPE_HOST_DELETE,
    APP_EVALUATE_REAPPLY_TYPE_SYSTEM_MODIFY:
        APP_EVALUATE_REAPPLY_TYPE_SYSTEM_MODIFY
}
# Progress constants
APP_PROGRESS_ABORTED = 'operation aborted, check logs for detail'

View File

@ -18,7 +18,7 @@
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2019 Wind River Systems, Inc.
# Copyright (c) 2013-2021 Wind River Systems, Inc.
#
@ -1897,6 +1897,36 @@ def find_metadata_file(path, metadata_file):
maintain_user_overrides: <true|false>
- optional: defaults to false. Over an app update any user overrides are
preserved for the new version of the application
...
behavior: - optional: describes the app behavior
platform_managed_app: <true/false/yes/no> - optional: when absent behaves as false
desired_state: <uploaded/applied> - optional: state the app should reach
evaluate_reapply: - optional: describe the reapply evaluation behaviour
after: - optional: list of apps that should be evaluated before the current one
- <app_name.1>
- <app_name.2>
triggers: - optional: list of what triggers the reapply evaluation
- type: <key in APP_EVALUATE_REAPPLY_TRIGGER_TO_METADATA_MAP>
filters: - optional: list of field:value, that aid filtering
of the trigger events. All pairs in this list must be
present in trigger dictionary that is passed in
the calls (eg. trigger[field_name1]==value_name1 and
trigger[field_name2]==value_name2).
Function evaluate_apps_reapply takes a dictionary called
'trigger' as parameter. Depending on trigger type this
may contain custom information used by apps, for example
a field 'personality' corresponding to node personality.
It is the duty of the app developer to enhance existing
triggers with the required information.
Hard to obtain information should be passed in the trigger.
To use existing information it is as simple as defining
the metadata.
- <field_name.1>: <value_name.1>
- <field_name.2>: <value_name.2>
filter_field: <field_name> - optional: field name in trigger
dictionary. If specified the filters are applied
to trigger[filter_field] sub-dictionary instead
of the root trigger dictionary.
"""
app_name = ''
app_version = ''
@ -1913,16 +1943,124 @@ def find_metadata_file(path, metadata_file):
# metadata file does not have the key(s)
pass
if (app_name is None or
app_version is None):
raise exception.SysinvException(_(
"Invalid %s: app_name or/and app_version "
"is/are None." % metadata_file))
if (app_name is None or
app_version is None):
raise exception.SysinvException(_(
"Invalid %s: app_name or/and app_version "
"is/are None." % metadata_file))
if not isinstance(patches, list):
raise exception.SysinvException(_(
"Invalid %s: patch_dependencies should "
"be a list." % metadata_file))
behavior = None
evaluate_reapply = None
triggers = None
try:
behavior = doc[constants.APP_METADATA_BEHAVIOR]
if not isinstance(behavior, dict):
raise exception.SysinvException(_(
"Invalid {}: {} should be a dict."
"".format(metadata_file,
constants.APP_METADATA_BEHAVIOR)))
except KeyError:
pass
if behavior:
try:
platform_managed_app = behavior[constants.APP_METADATA_PLATFORM_MANAGED_APP]
if not is_valid_boolstr(platform_managed_app):
raise exception.SysinvException(_(
"Invalid {}: {} expected value is a boolean string."
"".format(metadata_file,
constants.APP_METADATA_PLATFORM_MANAGED_APP)))
except KeyError:
pass
try:
desired_state = behavior[constants.APP_METADATA_DESIRED_STATE]
if not isinstance(desired_state, six.string_types):
raise exception.SysinvException(_(
"Invalid {}: {} should be {}."
"".format(metadata_file,
constants.APP_METADATA_DESIRED_STATE,
six.string_types)))
except KeyError:
pass
try:
evaluate_reapply = behavior[constants.APP_METADATA_EVALUATE_REAPPLY]
if not isinstance(evaluate_reapply, dict):
raise exception.SysinvException(_(
"Invalid {}: {} should be a dict."
"".format(metadata_file,
constants.APP_METADATA_EVALUATE_REAPPLY)))
except KeyError:
pass
if evaluate_reapply:
try:
after = evaluate_reapply[constants.APP_METADATA_AFTER]
if not isinstance(after, list):
raise exception.SysinvException(_(
"Invalid {}: {} should be a list."
"".format(metadata_file,
constants.APP_METADATA_AFTER)))
except KeyError:
pass
try:
triggers = evaluate_reapply[constants.APP_METADATA_TRIGGERS]
if not isinstance(triggers, list):
raise exception.SysinvException(_(
"Invalid {}: {} should be a list."
"".format(metadata_file,
constants.APP_METADATA_TRIGGERS)))
except KeyError:
pass
if triggers:
for trigger in triggers:
if not isinstance(trigger, dict):
raise exception.SysinvException(_(
"Invalid {}: element of {} should be a dict."
"".format(metadata_file,
constants.APP_METADATA_TRIGGERS)))
try:
type = trigger[constants.APP_METADATA_TYPE]
if not isinstance(type, six.string_types):
raise exception.SysinvException(_(
"Invalid {}: {} should be {}."
"".format(metadata_file,
constants.APP_METADATA_TYPE,
six.string_types)))
except KeyError:
pass
try:
filter_field = trigger[constants.APP_METADATA_FILTER_FIELD]
if not isinstance(filter_field, six.string_types):
raise exception.SysinvException(_(
"Invalid {}: {} should be {}."
"".format(metadata_file,
constants.APP_METADATA_TYPE,
six.string_types)))
except KeyError:
pass
try:
filters = trigger[constants.APP_METADATA_FILTERS]
if not isinstance(filters, list):
raise exception.SysinvException(_(
"Invalid {}: {} should be a list."
"".format(metadata_file,
constants.APP_METADATA_TYPE)))
except KeyError:
pass
if not isinstance(patches, list):
raise exception.SysinvException(_(
"Invalid %s: patch_dependencies should "
"be a list." % metadata_file))
return app_name, app_version, patches
@ -2073,6 +2211,12 @@ def generate_synced_armada_manifest_fqpn(app_name, app_version, manifest_filenam
app_name + '-' + manifest_filename)
def generate_synced_metadata_fqpn(app_name, app_version):
return os.path.join(
constants.APP_SYNCED_ARMADA_DATA_PATH, app_name, app_version,
'metadata.yaml')
def is_chart_enabled(dbapi, app_name, chart_name, namespace):
"""
Check if the chart is enable at an application level

View File

@ -139,9 +139,10 @@ class AppOperator(object):
# List of in progress apps and their abort status
abort_requested = {}
def __init__(self, dbapi, helm_op):
def __init__(self, dbapi, helm_op, apps_metadata):
self._dbapi = dbapi
self._helm = helm_op
self._apps_metadata = apps_metadata
self._plugins = PluginHelper(self._dbapi, self._helm)
self._fm_api = fm_api.FaultAPIs()
self._docker = DockerHelper(self._dbapi)
@ -1884,6 +1885,30 @@ class AppOperator(object):
lifecycle_op = self._helm.get_app_lifecycle_operator(app.name)
lifecycle_op.app_lifecycle_actions(context, conductor_obj, self, app, hook_info)
def load_application_metadata(self, rpc_app):
    """ Load the application metadata from the metadata file of the app

    :param rpc_app: data object provided in the rpc request
    """
    LOG.info("Loading application metadata for %s" % rpc_app.name)

    app = AppOperator.Application(rpc_app)

    # Nothing to do when the synced metadata file is absent.
    if not os.path.exists(app.sync_metadata_file):
        return

    with open(app.sync_metadata_file, 'r') as f:
        # The RoundTripLoader removes the superfluous quotes by default.
        # Set preserve_quotes=True to preserve all the quotes.
        # The assumption here: there is just one yaml section
        metadata = yaml.load(
            f, Loader=yaml.RoundTripLoader, preserve_quotes=True)

    # Only record a non-empty document in the shared metadata dict.
    if metadata:
        self._apps_metadata[constants.APP_METADATA_APPS][app.name] = metadata
        LOG.info("Loaded metadata for app {}: {}".format(app.name, metadata))
def perform_app_apply(self, rpc_app, mode, lifecycle_hook_info_app_apply, caller=None):
"""Process application install request
@ -2120,6 +2145,8 @@ class AppOperator(object):
lifecycle_hook_info_app_upload=lifecycle_hook_info_app_update)
lifecycle_hook_info_app_update.operation = constants.APP_UPDATE_OP
self.load_application_metadata(to_rpc_app)
# Check whether the new application is compatible with the current k8s version
self._utils._check_app_compatibility(to_app.name, to_app.version)
@ -2440,6 +2467,9 @@ class AppOperator(object):
self.sync_imgfile = generate_synced_images_fqpn(
self._kube_app.get('name'),
self._kube_app.get('app_version'))
self.sync_metadata_file = cutils.generate_synced_metadata_fqpn(
self._kube_app.get('name'),
self._kube_app.get('app_version'))
# Files: FQPN formatted for the docker armada_service
self.armada_service_mfile = generate_armada_service_manifest_fqpn(

View File

@ -183,6 +183,7 @@ class ConductorManager(service.PeriodicService):
RPC_API_VERSION = '1.1'
my_host_id = None
apps_metadata = {constants.APP_METADATA_APPS: {}}
def __init__(self, host, topic):
serializer = objects_base.SysinvObjectSerializer()
@ -255,7 +256,7 @@ class ConductorManager(service.PeriodicService):
# until host unlock and we need ceph-mon up in order to configure
# ceph for the initial unlock.
self._helm = helm.HelmOperator(self.dbapi)
self._app = kube_app.AppOperator(self.dbapi, self._helm)
self._app = kube_app.AppOperator(self.dbapi, self._helm, self.apps_metadata)
self._docker = kube_app.DockerHelper(self.dbapi)
self._kube = kubernetes.KubeOperator()
self._armada = kube_app.ArmadaHelper(self._kube)
@ -274,6 +275,10 @@ class ConductorManager(service.PeriodicService):
# Save our start time for time limited init actions
self._start_time = timeutils.utcnow()
# Load apps metadata
for app in self.dbapi.kube_app_get_all():
self._app.load_application_metadata(app)
def _get_active_controller_uuid(self):
ahost = utils.HostHelper.get_active_controller(self.dbapi)
if ahost:
@ -4815,11 +4820,9 @@ class ConductorManager(service.PeriodicService):
# Check if apps need to be re-applied when host services are
# available (after unlock), but only if system restore is not in
# progress
if not os.path.isfile(tsc.RESTORE_IN_PROGRESS_FLAG) \
and availability in [constants.VIM_SERVICES_ENABLED]:
for app_name in constants.HELM_APPS_WITH_REAPPLY_SUPPORT:
if cutils.is_app_applied(self.dbapi, app_name):
self.evaluate_app_reapply(context, app_name)
if not os.path.isfile(tsc.RESTORE_IN_PROGRESS_FLAG):
self.evaluate_apps_reapply(context, trigger={'type': constants.APP_EVALUATE_REAPPLY_HOST_AVAILABILITY,
'availability': availability})
# Clear any "reboot needed" DB entry for the host if it is set.
# If there are no more pending device image update entries in the DB
@ -5739,7 +5742,7 @@ class ConductorManager(service.PeriodicService):
# Pick first app that needs to be re-applied
for index, app_name in enumerate(
constants.HELM_APPS_WITH_REAPPLY_SUPPORT):
self.determine_apps_reapply_order(name_only=True)):
if self._app.needs_reapply(app_name):
break
else:
@ -9229,9 +9232,7 @@ class ConductorManager(service.PeriodicService):
host_uuids=host_uuids,
force=force)
for app_name in constants.HELM_APPS_WITH_REAPPLY_SUPPORT:
if cutils.is_app_applied(self.dbapi, app_name):
self.evaluate_app_reapply(context, app_name)
self.evaluate_apps_reapply(context, trigger={'type': constants.APP_EVALUATE_REAPPLY_TYPE_RUNTIME_APPLY_PUPPET})
# Remove reboot required flag in case it's present. Runtime manifests
# are no supposed to clear this flag. A host lock/unlock cycle (or similar)
@ -11741,6 +11742,121 @@ class ConductorManager(service.PeriodicService):
"""
return self._fernet.get_fernet_keys(key_id)
def determine_apps_reapply_order(self, name_only):
    """Determine the order in which apps should be evaluated for reapply.

    :param name_only: when True return app names, otherwise app objects
    :returns: list of active apps (or names), alphabetically sorted with
              HELM_APP_PLATFORM first; empty list on error
    """
    # TODO(dvoicule) reorder apps based on dependencies between them
    # now make HELM_APP_PLATFORM first to keep backward compatibility
    ordered_apps = []
    try:
        active_apps = [a for a in self.dbapi.kube_app_get_all() if a.active]

        def _sort_key(app):
            # Empty string sorts before any name, pinning the platform
            # app to the front of the alphabetical ordering.
            app_name = app.get('name', 'placeholder')
            return '' if app_name == constants.HELM_APP_PLATFORM else app_name

        ordered_apps = sorted(active_apps, key=_sort_key)
        if name_only:
            ordered_apps = [a.name for a in ordered_apps]
    except Exception as e:
        LOG.error("Error while ordering apps for reapply {}".format(e))
        ordered_apps = []

    return ordered_apps
def evaluate_apps_reapply(self, context, trigger):
    """Synchronously, determine whether an application
    re-apply is needed, and if so, raise the re-apply flag.

    Run 2 checks before doing an app evaluation.
    First check is a semantic check calling a lifecycle hook which can
    implement complex logic.
    Second check is specified in metadata which allows faster development
    time, doing simple key:value comparisons. Check that the 'trigger'
    parameter of the function contains a list of key:value pairs at a
    specified location. Default location for searching is root of 'trigger'
    dictionary. If the keys are absent or the values do not match, then the
    check is considered failed and the evaluation skipped.

    :param context: request context.
    :param trigger: dictionary containing at least the 'type' field

    """
    LOG.info("Evaluating apps reapply {} ".format(trigger))
    apps = self.determine_apps_reapply_order(name_only=False)

    metadata_map = constants.APP_EVALUATE_REAPPLY_TRIGGER_TO_METADATA_MAP

    for app in apps:
        app_metadata = self.apps_metadata[constants.APP_METADATA_APPS].get(app.name, {})

        try:
            app_triggers = app_metadata[constants.APP_METADATA_BEHAVIOR][
                constants.APP_METADATA_EVALUATE_REAPPLY][
                constants.APP_METADATA_TRIGGERS]
        except KeyError:
            # App metadata does not subscribe to any reapply trigger
            continue

        # First check: give the app's lifecycle hook a chance to reject
        # the evaluation (raises LifecycleSemanticCheckException).
        try:
            hook_info = LifecycleHookInfo()
            hook_info.mode = constants.APP_LIFECYCLE_MODE_AUTO
            hook_info.operation = constants.APP_EVALUATE_REAPPLY_OP
            hook_info.lifecycle_type = constants.APP_LIFECYCLE_TYPE_SEMANTIC_CHECK
            hook_info.extra[LifecycleConstants.EVALUATE_REAPPLY_TRIGGER] = trigger
            self.app_lifecycle_actions(context=context, rpc_app=app, hook_info=hook_info)
        except exception.LifecycleSemanticCheckException as e:
            LOG.info("Evaluate reapply for {} rejected: {}".format(app.name, e))
            continue
        except exception.LifecycleMissingInfo as e:
            LOG.error("Evaluate reapply for {} error: {}".format(app.name, e))
            continue
        except Exception as e:
            LOG.error("Unexpected error during hook for app {}, error: {}"
                      "".format(app.name, e))
            continue

        # Trigger types not present in the map never cause a reapply.
        if trigger['type'] not in metadata_map:
            continue

        # Second check: the app must subscribe to this trigger type in
        # its metadata.
        # BUG FIX: the previous code used `if filter(lambda ...)`, which
        # is always truthy on python3 because filter() returns a lazy
        # iterator; materialize the matching triggers instead.
        matching_triggers = [
            t for t in app_triggers
            if t.get(constants.APP_METADATA_TYPE, None) ==
            metadata_map[trigger['type']]]
        if not matching_triggers:
            continue

        # Use the first trigger with a matching type in the metadata
        app_trigger = matching_triggers[0]

        # Get the filters for the trigger
        trigger_filters = app_trigger.get(constants.APP_METADATA_FILTERS, [])

        # Get which field inside the trigger should have the filters applied on
        # Default is the trigger dictionary itself, but can be redirected to
        # a sub-dictionary
        target_for_filters_field = app_trigger.get(constants.APP_METADATA_FILTER_FIELD, None)
        if target_for_filters_field is None:
            target_for_filters = trigger
        else:
            if target_for_filters_field not in trigger:
                LOG.error("Trigger {} does not have field {}"
                          "".format(trigger, target_for_filters_field))
                continue
            target_for_filters = trigger[target_for_filters_field]

        allow = True
        # All filters must match, if any doesn't match then reject
        # the evaluation
        for filter_ in trigger_filters:
            # Each filter is a single entry dict.
            # BUG FIX: dict.keys() is not subscriptable on python3;
            # fetch the single key with next(iter(...)) instead of keys()[0].
            k = next(iter(filter_))
            if k not in target_for_filters:
                LOG.info("Evaluate reapply for {} rejected: "
                         "trigger field {} absent".format(app.name, k))
                allow = False
                break
            elif str(target_for_filters[k]) != str(filter_[k]):
                LOG.info("Evaluate reapply for {} rejected: "
                         "trigger field {} expected {} but got {} "
                         "".format(app.name, k, filter_[k], target_for_filters[k]))
                allow = False
                break

        if allow:
            self.evaluate_app_reapply(context, app.name)
def evaluate_app_reapply(self, context, app_name):
"""Synchronously, determine whether an application
re-apply is needed, and if so, raise the re-apply flag.
@ -11792,7 +11908,8 @@ class ConductorManager(service.PeriodicService):
LOG.exception("Failed to regenerate the overrides for app %s. %s" %
(app.name, e))
else:
LOG.info("%s app status does not warrant re-apply", app.name)
LOG.info("{} app active:{} status:{} does not warrant re-apply",
app.name, app.active, app.status)
def app_lifecycle_actions(self, context, rpc_app, hook_info):
"""Perform any lifecycle actions for the operation and timing supplied.
@ -11830,6 +11947,8 @@ class ConductorManager(service.PeriodicService):
except Exception as e:
LOG.error("Error performing app_lifecycle_actions %s" % str(e))
self._app.load_application_metadata(rpc_app)
def perform_app_apply(self, context, rpc_app, mode, lifecycle_hook_info_app_apply):
"""Handling of application install request (via AppOperator)

View File

@ -1814,6 +1814,17 @@ class ConductorAPI(sysinv.openstack.common.rpc.proxy.RpcProxy):
return self.call(context, self.make_msg('get_fernet_keys',
key_id=key_id))
def evaluate_apps_reapply(self, context, trigger):
    """Synchronously, determine whether an application
    re-apply is needed, and if so, raise the re-apply flag.

    :param context: request context.
    :param trigger: dictionary containing at least the 'type' field

    """
    # Forward the evaluation request to the conductor over RPC.
    msg = self.make_msg('evaluate_apps_reapply', trigger=trigger)
    return self.call(context, msg)
def evaluate_app_reapply(self, context, app_name):
"""Synchronously, determine whether an application
re-apply is needed, and if so, raise the re-apply flag.

View File

@ -33,17 +33,22 @@ class AppLifecycleOperator(object):
"""
# Semantic checks
if hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_SEMANTIC_CHECK and \
hook_info.mode == constants.APP_LIFECYCLE_MODE_AUTO and \
hook_info.operation == constants.APP_APPLY_OP and \
hook_info.relative_timing == constants.APP_LIFECYCLE_TIMING_PRE:
raise exception.LifecycleSemanticCheckException(
"Automatic apply is disabled for %s." % app.name)
if hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_SEMANTIC_CHECK:
if hook_info.mode == constants.APP_LIFECYCLE_MODE_AUTO and \
hook_info.operation == constants.APP_APPLY_OP and \
hook_info.relative_timing == constants.APP_LIFECYCLE_TIMING_PRE:
raise exception.LifecycleSemanticCheckException(
"Automatic apply is disabled for %s." % app.name)
elif hook_info.mode == constants.APP_LIFECYCLE_MODE_AUTO and \
hook_info.operation == constants.APP_EVALUATE_REAPPLY_OP:
# To reject the reapply evaluation an app can override this
# hook and raise exception.LifecycleSemanticCheckException
pass
# TODO(dvoicule) remove once each app has its lifecycle operator and takes care of its rbd
# this is here to keep the same functionality while decoupling
# Rbd
if hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_RBD:
elif hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_RBD:
if hook_info.operation == constants.APP_APPLY_OP and \
hook_info.relative_timing == constants.APP_LIFECYCLE_TIMING_PRE:
lifecycle_utils.create_rbd_provisioner_secrets(app_op, app, hook_info)
@ -54,7 +59,7 @@ class AppLifecycleOperator(object):
# TODO(dvoicule) remove once each app has its lifecycle operator and takes care of its resources
# this is here to keep the same functionality while decoupling
# Resources
if hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_RESOURCE:
elif hook_info.lifecycle_type == constants.APP_LIFECYCLE_TYPE_RESOURCE:
if hook_info.operation == constants.APP_APPLY_OP and \
hook_info.relative_timing == constants.APP_LIFECYCLE_TIMING_PRE:
lifecycle_utils.create_local_registry_secrets(app_op, app, hook_info)

View File

@ -18,3 +18,8 @@ class LifecycleConstants(object):
APP_APPLIED = 'app_applied'
APP_REMOVED = 'app_removed'
RETURN_CODE = 'rc'
# Key in hook_info.extra carrying the reapply-evaluation trigger dict
EVALUATE_REAPPLY_TRIGGER = 'trigger'
# Keys that may appear inside the trigger dictionary itself
TRIGGER_CONFIGURE_REQUIRED = 'configure_required'
TRIGGER_TYPE = 'type'
TRIGGER_OPENSTACK_WORKER = 'openstack_worker'
TRIGGER_DELTA_FIELDS = 'delta_fields'

View File

@ -38,6 +38,7 @@ class FakeConductorAPI(object):
self.remove_host_config = mock.MagicMock()
self.delete_barbican_secret = mock.MagicMock()
self.iplatform_update_by_ihost = mock.MagicMock()
self.evaluate_apps_reapply = mock.MagicMock()
self.evaluate_app_reapply = mock.MagicMock()
self.update_clock_synchronization_config = mock.MagicMock()
self.store_default_config = mock.MagicMock()
@ -253,6 +254,35 @@ class TestPostControllerMixin(object):
self.assertEqual(ndict['personality'], result['personality'])
self.assertEqual(ndict['serialid'], result['serialid'])
def test_create_host_evaluate_apps_reapply(self):
    """Verify apps reapply evaluation is requested on host-add and unlock."""
    self.skipTest("Need to allow tests to run from UNPROVISIONED"
                  " to reach host-add evaluate")
    c1 = self._create_controller_1(
        invprovision=constants.UNPROVISIONED,
        administrative=constants.ADMIN_LOCKED,
        operational=constants.OPERATIONAL_DISABLED,
        availability=constants.AVAILABILITY_ONLINE)
    self._create_test_host_platform_interface(c1)

    # Unlock
    _ = self._patch_host(c1['hostname'],
                         [{'path': '/action',
                           'value': constants.UNLOCK_ACTION,
                           'op': 'replace'},
                          {'path': '/operational',
                           'value': constants.OPERATIONAL_ENABLED,
                           'op': 'replace'},
                          {'path': '/availability',
                           'value': constants.AVAILABILITY_ONLINE,
                           'op': 'replace'}],
                         'mtce')

    # Verify that the apps reapply was called
    # once for unlock and once for host-add.
    # Use assertEqual rather than a bare assert(): assert statements are
    # stripped under `python -O` and give no diagnostic on failure.
    self.assertEqual(
        self.fake_conductor_api.evaluate_apps_reapply.call_count, 2)
def test_create_host_missing_mgmt_mac(self):
# Test creation of a second node with missing management MAC
ndict = dbutils.post_get_test_ihost(hostname='controller-1',
@ -1563,6 +1593,8 @@ class TestDelete(TestHost):
self.fake_conductor_api.unconfigure_ihost.assert_called_once()
# Verify that the host was deleted from barbican
self.fake_conductor_api.delete_barbican_secret.assert_called_once()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was dropped from patching
self.mock_patch_api_drop_host.assert_called_once()
# Verify the host no longer exists
@ -1901,6 +1933,8 @@ class TestPatch(TestHost):
self.fake_conductor_api.configure_ihost.assert_called_once()
# Verify that the app reapply was checked
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was added to maintenance
self.mock_mtce_api_host_modify.assert_called_once()
# Verify that the host action was cleared
@ -1934,6 +1968,8 @@ class TestPatch(TestHost):
self.fake_conductor_api.configure_ihost.assert_called_once()
# Verify that the app reapply was checked
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was modified in maintenance
self.mock_mtce_api_host_modify.assert_called_once()
# Verify that the host action was cleared
@ -1994,6 +2030,8 @@ class TestPatch(TestHost):
self.fake_conductor_api.configure_ihost.assert_not_called()
# Verify that the app reapply evaluate was not configured
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was not modified in maintenance
self.mock_mtce_api_host_modify.assert_not_called()
# Verify that the host action was cleared
@ -2080,6 +2118,8 @@ class TestPatch(TestHost):
# Verify that the host action was cleared
result = self.get_json('/ihosts/%s' % c1_host['hostname'])
self.assertEqual(constants.NONE_ACTION, result['action'])
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
def test_unlock_action_controller_while_upgrading_kubelet(self):
# Create controller-0
@ -2182,6 +2222,8 @@ class TestPatch(TestHost):
self.fake_conductor_api.configure_ihost.assert_called_once()
# Verify that the app reapply was checked
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was added to maintenance
self.mock_mtce_api_host_modify.assert_called_once()
# Verify that the host action was cleared
@ -2311,6 +2353,8 @@ class TestPatch(TestHost):
self.fake_conductor_api.configure_ihost.assert_not_called()
# Verify that the app reapply evaluate was not configured
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was not modified in maintenance
self.mock_mtce_api_host_modify.assert_not_called()
# Verify that the host action was cleared
@ -2365,6 +2409,8 @@ class TestPatchStdDuplexControllerAction(TestHost):
self.fake_conductor_api.configure_ihost.assert_not_called()
# Verify that the app reapply evaluate was not configured
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was modified in maintenance
self.mock_mtce_api_host_modify.assert_called_once()
# Verify that the host action was cleared
@ -2488,6 +2534,8 @@ class TestPatchStdDuplexControllerAction(TestHost):
self.fake_conductor_api.configure_ihost.assert_not_called()
# Verify that the app reapply evaluate was not configured
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was modified in maintenance
self.mock_mtce_api_host_modify.assert_called_once()
# Verify that the host action was cleared
@ -2591,6 +2639,8 @@ class TestPatchStdDuplexControllerAction(TestHost):
self.mock_vim_api_host_action.assert_not_called()
# Verify that the app reapply evaluate was not configured
self.fake_conductor_api.evaluate_app_reapply.assert_not_called()
# Verify that the apps reapply was called
self.fake_conductor_api.evaluate_apps_reapply.assert_called_once()
# Verify that the host was configured
self.fake_conductor_api.configure_ihost.assert_called_once()
# Verify that the host was modified in maintenance

View File

@ -10,6 +10,7 @@ import fixtures
from sysinv.common import constants
from sysinv.conductor import kube_app
from sysinv.conductor import manager
from sysinv.db import api as dbapi
from sysinv.helm import helm
from sysinv.openstack.common import context
@ -24,10 +25,14 @@ class AppOperatorTestCase(base.DbTestCase):
def setUp(self):
super(AppOperatorTestCase, self).setUp()
# Manager holds apps_metadata dict
self.service = manager.ConductorManager('test-host', 'test-topic')
# Set up objects for testing
self.helm_operator = helm.HelmOperator(dbapi.get_instance())
self.app_operator = kube_app.AppOperator(dbapi.get_instance(),
self.helm_operator)
self.helm_operator,
self.service.apps_metadata)
self.context = context.get_admin_context()
self.dbapi = dbapi.get_instance()
self.temp_dir = self.useFixture(fixtures.TempDir())