Standardize aggregation methods and granularities for Gnocchi collector

This PR proposes standardizing the aggregation methods allowed by
CloudKitty's Gnocchi collector to match the aggregation methods available
in the Gnocchi API, for both archive policies and the aggregation API. The
rationale is that we need to support the same aggregation methods available
there; otherwise, users can try to use a method that the Gnocchi API
offers, only to find that it does not work due to a limitation in
CloudKitty.

We also propose setting the default granularity to 3600 seconds. CloudKitty
already uses one-hour time frames by default, so it makes no sense not to
set the granularity to 3600 as well. If we do not define 3600 as the
default granularity, CloudKitty always retrieves (via the aggregation API)
measurements for all available granularities, which is an overhead. Besides
being an overhead, it can cause huge inconsistencies if, for some reason
(as we painfully discovered in production), the 3600 granularity is not
returned. The "_format_data" method does not check whether the data
obtained from the measurements object represents the 3600 (1h) time frame;
it just retrieves the first element and moves on, thus leading to wrong
billing information.
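
To illustrate the failure mode, here is a minimal sketch (ours, not
CloudKitty code) of an aggregation API response that mixes granularities;
the timestamps and values are made up:

    # Each measure is a (timestamp, granularity, value) triple. Without a
    # forced granularity, Gnocchi can return several granularities at once.
    measures = [
        ("2020-01-29T00:00:00+00:00", 86400.0, 42.0),  # 1-day data point
        ("2020-01-29T09:00:00+00:00", 3600.0, 1.75),   # 1-hour data point
        ("2020-01-29T09:00:00+00:00", 300.0, 0.12),    # 5-minute data point
    ]

    # Taking the first element, as "_format_data" effectively does, may
    # pick the 1-day point instead of the 1-hour one, skewing billing.
    wrong = measures[0]

    # Forcing granularity=3600 guarantees only 1-hour measures come back.
    hourly = [m for m in measures if m[1] == 3600.0]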

Last, but not least, we propose a debug log message that records, when the
debug log level is enabled, the response received from the aggregation API.
This enables troubleshooting for operations people; otherwise, we would
need to change code to be able to troubleshoot problems with CloudKitty
processing when using the Gnocchi collector.
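
For illustration, a rough sketch of what the proposed debug message looks
like; the operation, arguments, and returned payload are hypothetical
stand-ins, not output from a real deployment:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("cloudkitty.collector.gnocchi")

    op = ["aggregate", "max", ["metric", "cpu", "mean"]]      # hypothetical
    agg_kwargs = {"resource_type": "instance", "granularity": 3600}
    measurements = [{"group": {"id": "vm-1"}, "measures": []}]  # example

    # Mirrors the LOG.debug call added by this change.
    LOG.debug("Measurements [%s] received with operation [%s] and "
              "arguments [%s].", measurements, op, agg_kwargs)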

Change-Id: I8a1a75a74e04cb9babdf09f115620b152861e218
Rafael Weingärtner 2020-01-29 09:29:12 -03:00
parent 9e505f31a9
commit 385d20520f
3 changed files with 44 additions and 24 deletions


@@ -75,6 +75,27 @@ cfg.CONF.register_opts(collector_gnocchi_opts, COLLECTOR_GNOCCHI_OPTS)
 
 CONF = cfg.CONF
 
+# According to 'gnocchi/rest/aggregates/operations.py#AGG_MAP', the
+# following are the basic aggregation methods that one can use when
+# configuring an aggregation method in the archive policy in Gnocchi or
+# when using the aggregation API.
+BASIC_AGGREGATION_METHODS = set(('mean', 'sum', 'last', 'max', 'min', 'std',
+                                 'median', 'first', 'count'))
+
+for agg in list(BASIC_AGGREGATION_METHODS):
+    BASIC_AGGREGATION_METHODS.add("rate:%s" % agg)
+
+EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY = set(
+    (str(i) + 'pct' for i in six.moves.range(1, 100)))
+
+for agg in list(EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY):
+    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY.add("rate:%s" % agg)
+
+# The aggregation methods that one can use to configure the archive
+# policies also support the 'pct' (percentile) operation. Therefore,
+# we also expose those as a configuration.
+VALID_AGGREGATION_METHODS_FOR_METRICS = BASIC_AGGREGATION_METHODS.union(
+    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY)
+
 GNOCCHI_EXTRA_SCHEMA = {
     Required('extra_args'): {
         Required('resource_type'): All(str, Length(min=1)),
@@ -82,11 +103,10 @@ GNOCCHI_EXTRA_SCHEMA = {
         # This parameter permits to adapt the key of the resource identifier
         Required('resource_key', default='id'): All(str, Length(min=1)),
         Required('aggregation_method', default='max'):
-            In(['max', 'mean', 'min', 'rate:max', 'rate:mean', 'rate:min']),
-        Required('re_aggregation_method', default=None):
-            In([None, 'mean', 'median', 'std',
-                'min', 'max', 'sum', 'var', 'count']),
-        Required('force_granularity', default=0): All(int, Range(min=0)),
+            In(VALID_AGGREGATION_METHODS_FOR_METRICS),
+        Required('re_aggregation_method', default='max'):
+            In(BASIC_AGGREGATION_METHODS),
+        Required('force_granularity', default=3600): All(int, Range(min=0)),
     },
 }
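
As an aside on the hunks above, a quick sketch (ours, not part of the
change) of what those sets evaluate to once the module is loaded:

    basic = {'mean', 'sum', 'last', 'max', 'min', 'std', 'median', 'first',
             'count'}
    basic |= {"rate:%s" % agg for agg in basic}      # 18 methods

    extra = {str(i) + 'pct' for i in range(1, 100)}  # '1pct' .. '99pct'
    extra |= {"rate:%s" % agg for agg in extra}      # 198 methods

    valid_for_metrics = basic | extra

    assert 'rate:mean' in basic
    assert '95pct' in extra and 'rate:95pct' in extra
    assert len(valid_for_metrics) == 216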
@@ -295,28 +315,23 @@ class GnocchiCollector(collector.BaseCollector):
         if q_filter:
             query_parameters.append(q_filter)
 
-        re_aggregation_method = extra_args['re_aggregation_method']
-        if re_aggregation_method is None:
-            re_aggregation_method = extra_args['aggregation_method']
-
-        # build aggregration operation
-        op = ["aggregate", re_aggregation_method,
-              ["metric", metric_name, extra_args['aggregation_method']]]
-        # get groupby
-        groupby = self.conf[metric_name]['groupby']
+        op = self.build_operation_command(extra_args, metric_name)
 
         agg_kwargs = {
             'resource_type': resource_type,
             'start': start,
             'stop': end,
-            'groupby': groupby,
+            'groupby': self.conf[metric_name]['groupby'],
             'search': self.extend_filter(*query_parameters),
         }
         if extra_args['force_granularity'] > 0:
             agg_kwargs['granularity'] = extra_args['force_granularity']
 
         try:
-            return self._conn.aggregates.fetch(op, **agg_kwargs)
+            measurements = self._conn.aggregates.fetch(op, **agg_kwargs)
+            LOG.debug("Measurements [%s] received with operation [%s] and "
+                      "arguments [%s].", measurements, op, agg_kwargs)
+            return measurements
         except (gexceptions.MetricNotFound, gexceptions.BadRequest) as e:
             # FIXME(peschk_l): gnocchiclient seems to be raising a BadRequest
             # when it should be raising MetricNotFound
@@ -327,6 +342,13 @@ class GnocchiCollector(collector.BaseCollector):
                     'current cycle.'.format(scope=project_id, err=e))
             return []
 
+    @staticmethod
+    def build_operation_command(extra_args, metric_name):
+        re_aggregation_method = extra_args['re_aggregation_method']
+        op = ["aggregate", re_aggregation_method,
+              ["metric", metric_name, extra_args['aggregation_method']]]
+        return op
+
     def _format_data(self, metconf, data, resources_info=None):
         """Formats gnocchi data to CK data.


@@ -180,6 +180,7 @@ class GnocchiCollectorAggregationOperationTest(tests.TestCase):
             resource_type='resource_x',
             search={'=': {'type': 'resource_x'}},
             start=self.start, stop=self.end,
+            granularity=3600
         )
 
     def test_no_agg_no_re_agg(self):
@@ -192,7 +193,7 @@ class GnocchiCollectorAggregationOperationTest(tests.TestCase):
             'resource_type': 'resource_x',
             'aggregation_method': 'mean',
         }
-        expected_op = ["aggregate", "mean", ["metric", "metric_one", "mean"]]
+        expected_op = ["aggregate", "max", ["metric", "metric_one", "mean"]]
         self.do_test(expected_op, extra_args=extra_args)
 
     def test_no_agg_custom_re_agg(self):


@@ -69,12 +69,9 @@ class MetricConfigValidationTest(tests.TestCase):
         expected_output = copy.deepcopy(self.base_output)
         expected_output['metric_one']['groupby'] += ['project_id', 'id']
         expected_output['metric_one']['extra_args'] = {
-            'aggregation_method': 'max',
-            're_aggregation_method': None,
-            'force_granularity': 0,
-            'resource_type': 'res',
-            'resource_key': 'id',
-        }
+            'aggregation_method': 'max', 're_aggregation_method': 'max',
+            'force_granularity': 3600, 'resource_type': 'res',
+            'resource_key': 'id'}
 
         self.assertEqual(
             collector.gnocchi.GnocchiCollector.check_configuration(data),