Merge "Allows multiple rating types for same metric for gnocchi"
This commit is contained in:
commit f2e70ca36f
@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+import copy
 from datetime import timedelta

 import requests
@@ -24,6 +25,7 @@ from oslo_config import cfg
 from oslo_log import log as logging
 from voluptuous import All
 from voluptuous import In
+from voluptuous import Invalid
 from voluptuous import Length
 from voluptuous import Range
 from voluptuous import Required
@@ -39,6 +41,24 @@ LOG = logging.getLogger(__name__)

 COLLECTOR_GNOCCHI_OPTS = 'collector_gnocchi'

+
+def GnocchiMetricDict(value):
+    if isinstance(value, dict) and len(value.keys()) > 0:
+        return value
+    if isinstance(value, list) and len(value) > 0:
+        for v in value:
+            if not (isinstance(v, dict) and len(v.keys()) > 0):
+                raise Invalid("Not a dict with at least one key or a "
+                              "list with at least one dict with at "
+                              "least one key. Provided value: %s" % value)
+        return value
+    raise Invalid("Not a dict with at least one key or a "
+                  "list with at least one dict with at "
+                  "least one key. Provided value: %s" % value)
+
+
+GNOCCHI_CONF_SCHEMA = {Required('metrics'): GnocchiMetricDict}
+
 collector_gnocchi_opts = [
     cfg.StrOpt(
         'gnocchi_auth_type',
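For orientation, here is a standalone sketch (not part of the patch; the helper name is illustrative) of the rule the new ``GnocchiMetricDict`` validator enforces: the value must be a non-empty dict, or a non-empty list of non-empty dicts::

    def validate_metrics_value(value):
        """Restate the shape check performed by GnocchiMetricDict."""
        if isinstance(value, dict) and value:
            return value
        if isinstance(value, list) and value and all(
                isinstance(v, dict) and v for v in value):
            return value
        raise ValueError("expected a non-empty dict or a non-empty "
                         "list of non-empty dicts, got: %r" % value)

    # Accepted: a non-empty dict.
    validate_metrics_value({"instance.metric": {"unit": "instance"}})

    # Accepted: a non-empty list of non-empty dicts.
    validate_metrics_value([{"alt_name": "flavor"},
                            {"alt_name": "os_license"}])

    # Rejected: empty containers or any other type raise an error.
    # validate_metrics_value([])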
@@ -183,16 +203,30 @@ class GnocchiCollector(collector.BaseCollector):
         """Check metrics configuration

         """
-        conf = collector.BaseCollector.check_configuration(conf)
+        conf = Schema(GNOCCHI_CONF_SCHEMA)(conf)
+        conf = copy.deepcopy(conf)
+        scope_key = CONF.collect.scope_key
         metric_schema = Schema(collector.METRIC_BASE_SCHEMA).extend(
             GNOCCHI_EXTRA_SCHEMA)

         output = {}
-        for metric_name, metric in conf.items():
-            met = output[metric_name] = metric_schema(metric)
+        for metric_name, metric in conf['metrics'].items():
+            if not isinstance(metric, list):
+                metric = [metric]
+            for m in metric:
+                met = metric_schema(m)
+                m.update(met)
+                if scope_key not in m['groupby']:
+                    m['groupby'].append(scope_key)
+                if met['extra_args']['resource_key'] not in m['groupby']:
+                    m['groupby'].append(met['extra_args']['resource_key'])

-            if met['extra_args']['resource_key'] not in met['groupby']:
-                met['groupby'].append(met['extra_args']['resource_key'])
+                names = [metric_name]
+                alt_name = met.get('alt_name')
+                if alt_name is not None:
+                    names.append(alt_name)
+                new_metric_name = "@#".join(names)
+                output[new_metric_name] = m

         return output

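A rough sketch of the key-naming scheme that the reworked ``check_configuration`` above introduces (derived from the diff; the helper below is illustrative, not part of the patch): every entry of a metric's list is emitted under a composite key built from the metric name and its ``alt_name``, joined with ``@#``::

    def expanded_keys(metric_name, rating_types):
        """Mimic how the expanded output keys are built for one metric."""
        if not isinstance(rating_types, list):
            rating_types = [rating_types]
        keys = []
        for m in rating_types:
            names = [metric_name]
            alt_name = m.get('alt_name')
            if alt_name is not None:
                names.append(alt_name)
            keys.append("@#".join(names))
        return keys

    print(expanded_keys('instance.metric',
                        [{'alt_name': 'flavor'},
                         {'alt_name': 'operating_system_license'}]))
    # ['instance.metric@#flavor', 'instance.metric@#operating_system_license']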
@@ -337,7 +371,7 @@ class GnocchiCollector(collector.BaseCollector):
             if 'Metrics not found' not in e.message["cause"]:
                 raise
             LOG.warning('[{scope}] Skipping this metric for the '
-                        'current cycle.'.format(scope=project_id, err=e))
+                        'current cycle.'.format(scope=project_id))
             return []

     def get_aggregation_api_arguments(self, end, metric_name, project_id,
@@ -392,6 +426,7 @@ class GnocchiCollector(collector.BaseCollector):

     @staticmethod
     def generate_aggregation_operation(extra_args, metric_name):
+        metric_name = metric_name.split('@#')[0]
         aggregation_method = extra_args['aggregation_method']
         re_aggregation_method = aggregation_method

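The composite key is mapped back to the plain Gnocchi metric name before the aggregation operation is built, so every rating type of a metric queries the same underlying metric; a quick sketch::

    # Both composite keys resolve to the Gnocchi metric 'instance.metric'.
    for key in ('instance.metric@#flavor',
                'instance.metric@#operating_system_license'):
        assert key.split('@#')[0] == 'instance.metric'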
@@ -400,7 +400,7 @@ class Worker(BaseWorker):
             return True

     def do_execute_scope_processing(self, timestamp):
-        metrics = list(self._conf['metrics'].keys())
+        metrics = list(self._collector.conf.keys())
         # Collection
         metrics = sorted(metrics)
         usage_data = self._do_collection(metrics, timestamp)
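The worker now takes the metric list from the collector's checked configuration rather than from the raw ``metrics`` section, so the expanded composite keys are what gets collected. A hedged illustration (the sample keys come from the updated tests later in this diff)::

    collector_conf_keys = ['image.size', 'cpu@#instance', 'volume.size']
    metrics = sorted(collector_conf_keys)
    # ['cpu@#instance', 'image.size', 'volume.size']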
@@ -67,6 +67,34 @@ class GnocchiCollectorTest(tests.TestCase):
             }}]}
         self.assertEqual(expected, actual)

+    def test_collector_retrieve_metrics(self):
+        expected_data = {"group": {"id": "id-1",
+                                   "revision_start": datetime.datetime(
+                                       2020, 1, 1, 1, 10, 0,
+                                       tzinfo=tz.tzutc())
+                                   }}
+
+        data = [
+            {"group": {"id": "id-1", "revision_start": datetime.datetime(
+                2020, 1, 1, tzinfo=tz.tzutc())}},
+            expected_data
+        ]
+
+        no_response = mock.patch(
+            'cloudkitty.collector.gnocchi.GnocchiCollector.fetch_all',
+            return_value=data,
+        )
+
+        for c in self.collector.conf:
+            with no_response:
+                actual_name, actual_data = self.collector.retrieve(
+                    metric_name=c,
+                    start=samples.FIRST_PERIOD_BEGIN,
+                    end=samples.FIRST_PERIOD_END,
+                    project_id=samples.TENANT,
+                    q_filter=None,
+                )
+
     def test_generate_two_fields_filter_different_operations(self):
         actual = self.collector.gen_filter(
             cop='>=',
@@ -160,8 +188,8 @@ class GnocchiCollectorAggregationOperationTest(tests.TestCase):
         self.start = datetime.datetime(2019, 1, 1, tzinfo=tz.tzutc())
         self.end = datetime.datetime(2019, 1, 1, 1, tzinfo=tz.tzutc())

-    def do_test(self, expected_op, extra_args=None):
-        conf = {
+    def do_test(self, expected_op, extra_args=None, conf=None):
+        conf = conf or {
             'metrics': {
                 'metric_one': {
                     'unit': 'GiB',
@@ -172,16 +200,38 @@ class GnocchiCollectorAggregationOperationTest(tests.TestCase):
         }

         coll = gnocchi.GnocchiCollector(period=3600, conf=conf)
-        with mock.patch.object(coll._conn.aggregates, 'fetch') as fetch_mock:
-            coll._fetch_metric('metric_one', self.start, self.end)
-            fetch_mock.assert_called_once_with(
-                expected_op,
-                groupby=['project_id', 'id'],
-                resource_type='resource_x',
-                search={'=': {'type': 'resource_x'}},
-                start=self.start, stop=self.end,
-                granularity=3600
-            )
+
+        for c in coll.conf:
+            with mock.patch.object(coll._conn.aggregates,
+                                   'fetch') as fetch_mock:
+                coll._fetch_metric(c, self.start, self.end)
+                fetch_mock.assert_called_once_with(
+                    expected_op,
+                    groupby=['project_id', 'id'],
+                    resource_type='resource_x',
+                    search={'=': {'type': 'resource_x'}},
+                    start=self.start, stop=self.end,
+                    granularity=3600
+                )
+
+    def test_multiple_confs(self):
+        conf = {
+            'metrics': {
+                'metric_one': [{
+                    'alt_name': 'foo',
+                    'unit': 'GiB',
+                    'groupby': ['project_id'],
+                    'extra_args': {'resource_type': 'resource_x'},
+                }, {
+                    'alt_name': 'bar',
+                    'unit': 'GiB',
+                    'groupby': ['project_id'],
+                    'extra_args': {'resource_type': 'resource_x'},
+                }]
+            }
+        }
+        expected_op = ["aggregate", "max", ["metric", "metric_one", "max"]]
+        self.do_test(expected_op, conf=conf)

     def test_no_agg_no_re_agg(self):
         extra_args = {'resource_type': 'resource_x'}
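With the ``test_multiple_confs`` configuration above, a hedged sketch of the expanded keys the collector is expected to end up with; both resolve to the same Gnocchi metric, which is why each loop iteration asserts the same fetch operation::

    expected_conf_keys = ['metric_one@#foo', 'metric_one@#bar']
    assert {k.split('@#')[0] for k in expected_conf_keys} == {'metric_one'}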
@@ -355,14 +355,19 @@ class WorkerTest(tests.TestCase):
             self, update_scope_processing_state_db_mock,
             persist_rating_data_mock, execute_measurements_rating_mock,
             do_collection_mock):
-        self.worker._conf = {"metrics": {"metric1": "s", "metric2": "d"}}
+        self.worker._collector = collector.gnocchi.GnocchiCollector(
+            period=3600,
+            conf=tests.samples.DEFAULT_METRICS_CONF,
+        )
         do_collection_mock.return_value = None

         timestamp_now = tzutils.localized_now()
         self.worker.do_execute_scope_processing(timestamp_now)

         do_collection_mock.assert_has_calls([
-            mock.call(["metric1", "metric2"], timestamp_now)
+            mock.call(['cpu@#instance', 'image.size', 'ip.floating',
+                       'network.incoming.bytes', 'network.outgoing.bytes',
+                       'radosgw.objects.size', 'volume.size'], timestamp_now)
         ])

         self.assertFalse(execute_measurements_rating_mock.called)
@@ -378,7 +383,10 @@ class WorkerTest(tests.TestCase):
             self, update_scope_processing_state_db_mock,
             persist_rating_data_mock, execute_measurements_rating_mock,
             do_collection_mock):
-        self.worker._conf = {"metrics": {"metric1": "s", "metric2": "d"}}
+        self.worker._collector = collector.gnocchi.GnocchiCollector(
+            period=3600,
+            conf=tests.samples.DEFAULT_METRICS_CONF,
+        )

         usage_data_mock = {"some_usage_data": 2}
         do_collection_mock.return_value = usage_data_mock
@@ -391,7 +399,9 @@ class WorkerTest(tests.TestCase):
         self.worker.do_execute_scope_processing(timestamp_now)

         do_collection_mock.assert_has_calls([
-            mock.call(["metric1", "metric2"], timestamp_now)
+            mock.call(['cpu@#instance', 'image.size', 'ip.floating',
+                       'network.incoming.bytes', 'network.outgoing.bytes',
+                       'radosgw.objects.size', 'volume.size'], timestamp_now)
         ])

         end_time = tzutils.add_delta(
@@ -288,6 +288,33 @@ specified. The extra args for each collector are detailed below.
 Gnocchi
 ~~~~~~~

+Besides the common configuration, the Gnocchi collector also accepts a list of
+rating type definitions for each metric. Using a list of rating type
+definitions allows operators to rate different aspects of the same resource
+type collected through the same metric in Gnocchi; otherwise, operators would
+need to create multiple metrics in Gnocchi to define multiple rating types in
+CloudKitty.
+
+.. code-block:: yaml
+
+    metrics:
+      instance.metric:
+        - unit: instance
+          alt_name: flavor
+          mutate: NUMBOOL
+          groupby:
+            - id
+          metadata:
+            - flavor_id
+        - unit: instance
+          alt_name: operating_system_license
+          mutate: NUMBOOL
+          groupby:
+            - id
+          metadata:
+            - os_license
+
+
 .. note:: In order to retrieve metrics from Gnocchi, Cloudkitty uses the
    dynamic aggregates endpoint. It builds an operation of the following
    format: ``(aggregate RE_AGGREGATION_METHOD (metric METRIC_NAME
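As an illustration of the operation format mentioned in the note above, the new ``test_multiple_confs`` earlier in this diff expects the following nested-list operation for a metric aggregated with ``max``::

    expected_op = ["aggregate", "max", ["metric", "metric_one", "max"]]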
@@ -0,0 +1,7 @@
+---
+features:
+  - |
+    Extends the Gnocchi collector to allow operators
+    to create multiple rating types for the same metric in Gnocchi.
+    The Gnocchi collector now accepts a list of configurations for each
+    metric entry, instead of only one configuration.
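For reference, a sketch in plain Python (mirroring the YAML in the documentation change above; illustrative values only) of the two configuration shapes the collector now accepts for a single metric entry::

    # Previously: exactly one rating type per metric.
    single_conf = {'metrics': {'instance.metric': {'unit': 'instance'}}}

    # Now also accepted: a list of rating types for the same metric.
    multi_conf = {'metrics': {'instance.metric': [
        {'unit': 'instance', 'alt_name': 'flavor'},
        {'unit': 'instance', 'alt_name': 'operating_system_license'},
    ]}}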