Add mask key parameters to failure logging

This adds the two parameters mask_inputs_keys and
mask_outputs_keys to the failure logging formatter
and the dynamic logging listener so that fields in
the requires and provides data can be masked when
that data is a dict.
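
For illustration only (not part of the change): a minimal sketch of how the
new parameters are meant to be used from the listener side. The flow, task
and key names below ('create-user', 'password', 'auth_token') are invented
examples, not anything this patch adds.

    from taskflow import engines
    from taskflow import task
    from taskflow.listeners import logging as logging_listener
    from taskflow.patterns import linear_flow

    class CreateUser(task.Task):
        # Hypothetical task: consumes 'password', provides 'auth_token'.
        default_provides = 'auth_token'

        def execute(self, password):
            return 'fake-token'

    flow = linear_flow.Flow('create-user').add(CreateUser())
    engine = engines.load(flow, store={'password': 'hunter2'})

    # If a failure is logged, 'password' and 'auth_token' show up as '***'.
    with logging_listener.DynamicLoggingListener(
            engine,
            mask_inputs_keys=('password',),
            mask_outputs_keys=('auth_token',)):
        engine.run()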

Change-Id: Ib05255e01f806c5a134538cc8ddd168e45503a7f
Tobias Urdin 2024-10-21 10:17:48 +02:00
parent b4a2f99035
commit a5b7323fa4
2 changed files with 26 additions and 5 deletions

taskflow/formatters.py

@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import copy
 import functools

 from taskflow.engines.action_engine import compiler
@@ -63,10 +64,22 @@ class FailureFormatter:
         states.EXECUTE: (_fetch_predecessor_tree, 'predecessors'),
     }

-    def __init__(self, engine, hide_inputs_outputs_of=()):
+    def __init__(self, engine, hide_inputs_outputs_of=(),
+                 mask_inputs_keys=(), mask_outputs_keys=()):
         self._hide_inputs_outputs_of = hide_inputs_outputs_of
+        self._mask_inputs_keys = mask_inputs_keys
+        self._mask_outputs_keys = mask_outputs_keys
         self._engine = engine

+    def _mask_keys(self, data, mask_keys):
+        if not data or not isinstance(data, dict):
+            return data
+        result = copy.deepcopy(data)
+        for k in mask_keys:
+            if k in result:
+                result[k] = '***'
+        return result
+
     def _format_node(self, storage, cache, node):
         """Formats a single tree node into a string version."""
         if node.metadata['kind'] == compiler.FLOW:
@@ -98,12 +111,14 @@ class FailureFormatter:
                                                        atom_name,
                                                        fetch_mapped_args)
                 if requires_found:
-                    atom_attrs['requires'] = requires
+                    atom_attrs['requires'] = self._mask_keys(
+                        requires, self._mask_inputs_keys)
                 provides, provides_found = _cached_get(
                     cache, 'provides', atom_name,
                     storage.get_execute_result, atom_name)
                 if provides_found:
-                    atom_attrs['provides'] = provides
+                    atom_attrs['provides'] = self._mask_keys(
+                        provides, self._mask_outputs_keys)
             if atom_attrs:
                 return "Atom '{}' {}".format(atom_name, atom_attrs)
             else:
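
For illustration only: the new _mask_keys helper returns non-dict (or empty)
data untouched and never mutates the original mapping; it masks a deep copy.
A standalone sketch of the equivalent behavior, with invented key names:

    import copy

    def mask_keys(data, keys):
        # Same rules as FailureFormatter._mask_keys: only dicts get masked.
        if not data or not isinstance(data, dict):
            return data
        result = copy.deepcopy(data)
        for k in keys:
            if k in result:
                result[k] = '***'
        return result

    print(mask_keys({'user': 'bob', 'password': 'hunter2'}, ('password',)))
    # {'user': 'bob', 'password': '***'}
    print(mask_keys(['not', 'a', 'dict'], ('password',)))
    # ['not', 'a', 'dict'] -- returned unchanged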

taskflow/listeners/logging.py

@@ -108,7 +108,9 @@ class DynamicLoggingListener(base.Listener):
                  retry_listen_for=base.DEFAULT_LISTEN_FOR,
                  log=None, failure_level=logging.WARNING,
                  level=logging.DEBUG, hide_inputs_outputs_of=(),
-                 fail_formatter=None):
+                 fail_formatter=None,
+                 mask_inputs_keys=(),
+                 mask_outputs_keys=()):
         super().__init__(
             engine, task_listen_for=task_listen_for,
             flow_listen_for=flow_listen_for, retry_listen_for=retry_listen_for)
@@ -125,11 +127,15 @@ class DynamicLoggingListener(base.Listener):
             states.REVERTED: self._failure_level,
         }
         self._hide_inputs_outputs_of = frozenset(hide_inputs_outputs_of)
+        self._mask_inputs_keys = frozenset(mask_inputs_keys)
+        self._mask_outputs_keys = frozenset(mask_outputs_keys)
         self._logger = misc.pick_first_not_none(log, self._LOGGER, LOG)
         if fail_formatter is None:
             self._fail_formatter = formatters.FailureFormatter(
                 self._engine,
-                hide_inputs_outputs_of=self._hide_inputs_outputs_of)
+                hide_inputs_outputs_of=self._hide_inputs_outputs_of,
+                mask_inputs_keys=self._mask_inputs_keys,
+                mask_outputs_keys=self._mask_outputs_keys)
         else:
             self._fail_formatter = fail_formatter
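
Worth noting from the hunk above: the mask parameters are only wired into the
default FailureFormatter. If a caller supplies its own fail_formatter, the
listener stores it as-is, so masking has to be configured on that formatter
directly. A hedged sketch, reusing the hypothetical engine and key names from
the earlier example:

    from taskflow import formatters

    fmt = formatters.FailureFormatter(
        engine,
        hide_inputs_outputs_of=('create-user',),
        mask_inputs_keys=('password',),
        mask_outputs_keys=('auth_token',))

    listener = logging_listener.DynamicLoggingListener(
        engine, fail_formatter=fmt)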