gnocchi: use gnocchiclient instead of requests

Depends-On: Ifc19e485267b321cfbd42e7ae977462826820526
Change-Id: I60cf8a940134db7a2c298861ab26fe202d2181f3
This commit is contained in:
Mehdi Abaakouk 2015-11-26 09:36:01 +01:00 committed by Julien Danjou
parent 46da8efef0
commit fa3a982a27
5 changed files with 204 additions and 259 deletions

View File

@@ -13,9 +13,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient import client
from gnocchiclient import exceptions
from oslo_serialization import jsonutils
import pecan
import requests
import wsme
from wsme import types as wtypes
@@ -60,19 +61,20 @@ class AlarmGnocchiThresholdRule(base.AlarmRule):
# @cachetools.ttl_cache(maxsize=1, ttl=600)
@staticmethod
def _get_aggregation_methods():
ks_client = keystone_client.get_client(pecan.request.cfg)
gnocchi_url = pecan.request.cfg.gnocchi_url
headers = {'Content-Type': "application/json",
'X-Auth-Token': keystone_client.get_auth_token(ks_client)}
try:
r = requests.get("%s/v1/capabilities" % gnocchi_url,
headers=headers)
except requests.ConnectionError as e:
raise GnocchiUnavailable(e)
if r.status_code // 200 != 1:
raise GnocchiUnavailable(r.text)
conf = pecan.request.cfg
gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
interface=conf.service_credentials.interface,
region_name=conf.service_credentials.region_name,
endpoint_override=conf.gnocchi_url)
return jsonutils.loads(r.text).get('aggregation_methods', [])
try:
return gnocchi_client.capabilities.list().get(
'aggregation_methods', [])
except exceptions.ClientException as e:
raise base.ClientSideError(e.message, status_code=e.code)
except Exception as e:
raise GnocchiUnavailable(e)
class MetricOfResourceRule(AlarmGnocchiThresholdRule):
@@ -99,23 +101,21 @@ class MetricOfResourceRule(AlarmGnocchiThresholdRule):
super(MetricOfResourceRule,
cls).validate_alarm(alarm)
conf = pecan.request.cfg
gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
interface=conf.service_credentials.interface,
region_name=conf.service_credentials.region_name,
endpoint_override=conf.gnocchi_url)
rule = alarm.gnocchi_resources_threshold_rule
ks_client = keystone_client.get_client(pecan.request.cfg)
gnocchi_url = pecan.request.cfg.gnocchi_url
headers = {'Content-Type': "application/json",
'X-Auth-Token': keystone_client.get_auth_token(ks_client)}
try:
r = requests.get("%s/v1/resource/%s/%s" % (
gnocchi_url, rule.resource_type,
rule.resource_id),
headers=headers)
except requests.ConnectionError as e:
gnocchi_client.resource.get(rule.resource_type,
rule.resource_id)
except exceptions.ClientException as e:
raise base.ClientSideError(e.message, status_code=e.code)
except Exception as e:
raise GnocchiUnavailable(e)
if r.status_code == 404:
raise base.EntityNotFound('gnocchi resource',
rule.resource_id)
elif r.status_code // 200 != 1:
raise base.ClientSideError(r.content, status_code=r.status_code)
class AggregationMetricByResourcesLookupRule(AlarmGnocchiThresholdRule):
@@ -156,31 +156,28 @@ class AggregationMetricByResourcesLookupRule(AlarmGnocchiThresholdRule):
# Scope the alarm to the project id if needed
auth_project = v2_utils.get_auth_project(alarm.project_id)
if auth_project:
rule.query = jsonutils.dumps({
"and": [{"=": {"created_by_project_id": auth_project}},
query]})
query = {"and": [{"=": {"created_by_project_id": auth_project}},
query]}
rule.query = jsonutils.dumps(query)
# Delegate the query validation to gnocchi
ks_client = keystone_client.get_client(pecan.request.cfg)
request = {
'url': "%s/v1/aggregation/resource/%s/metric/%s" % (
pecan.request.cfg.gnocchi_url,
rule.resource_type,
rule.metric),
'headers': {'Content-Type': "application/json",
'X-Auth-Token': keystone_client.get_auth_token(
ks_client)},
'params': {'aggregation': rule.aggregation_method,
'needed_overlap': 0},
'data': rule.query,
}
conf = pecan.request.cfg
gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
interface=conf.service_credentials.interface,
region_name=conf.service_credentials.region_name,
endpoint_override=conf.gnocchi_url)
try:
r = requests.post(**request)
except requests.ConnectionError as e:
gnocchi_client.metric.aggregation(
metrics=rule.metric,
query=query,
aggregation=rule.aggregation_method,
needed_overlap=0,
resource_type=rule.resource_type)
except exceptions.ClientException as e:
raise base.ClientSideError(e.message, status_code=e.code)
except Exception as e:
raise GnocchiUnavailable(e)
if r.status_code // 200 != 1:
raise base.ClientSideError(r.content, status_code=r.status_code)
class AggregationMetricsByIdLookupRule(AlarmGnocchiThresholdRule):

View File

@@ -13,10 +13,10 @@
# License for the specific language governing permissions and limitations
# under the License.
from gnocchiclient import client
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
import requests
from aodh.evaluator import threshold
from aodh.i18n import _
@@ -27,8 +27,8 @@ LOG = log.getLogger(__name__)
OPTS = [
cfg.StrOpt('gnocchi_url',
deprecated_group="alarm",
default="http://localhost:8041",
help='URL to Gnocchi.'),
deprecated_for_removal=True,
help='URL to Gnocchi. default: autodetection'),
]
@@ -36,61 +36,46 @@ class GnocchiThresholdEvaluator(threshold.ThresholdEvaluator):
def __init__(self, conf):
super(threshold.ThresholdEvaluator, self).__init__(conf)
self.gnocchi_url = conf.gnocchi_url
def _get_headers(self, content_type="application/json"):
return {
'Content-Type': content_type,
'X-Auth-Token': keystone_client.get_auth_token(self.ks_client),
}
self._gnocchi_client = client.Client(
'1', keystone_client.get_session(conf),
interface=conf.service_credentials.interface,
region_name=conf.service_credentials.region_name,
endpoint_override=conf.gnocchi_url)
def _statistics(self, alarm, start, end):
"""Retrieve statistics over the current window."""
method = 'get'
req = {
'url': self.gnocchi_url + "/v1",
'headers': self._get_headers(),
'params': {
'aggregation': alarm.rule['aggregation_method'],
'start': start,
'end': end,
}
}
if alarm.type == 'gnocchi_aggregation_by_resources_threshold':
method = 'post'
req['url'] += "/aggregation/resource/%s/metric/%s" % (
alarm.rule['resource_type'], alarm.rule['metric'])
req['data'] = alarm.rule['query']
# FIXME(sileht): In case of a heat autoscaling stack decide to
# delete an instance, the gnocchi metrics associated to this
# instance will be no more updated and when the alarm will ask
# for the aggregation, gnocchi will raise a 'No overlap' exception.
# So temporary set 'needed_overlap' to 0 to disable the
# gnocchi checks about missing points. For more detail see:
# https://bugs.launchpad.net/gnocchi/+bug/1479429
req['params']['needed_overlap'] = 0
elif alarm.type == 'gnocchi_aggregation_by_metrics_threshold':
req['url'] += "/aggregation/metric"
req['params']['metric'] = alarm.rule['metrics']
elif alarm.type == 'gnocchi_resources_threshold':
req['url'] += "/resource/%s/%s/metric/%s/measures" % (
alarm.rule['resource_type'],
alarm.rule['resource_id'], alarm.rule['metric'])
LOG.debug('stats query %s', req['url'])
try:
r = getattr(requests, method)(**req)
if alarm.type == 'gnocchi_aggregation_by_resources_threshold':
# FIXME(sileht): In case of a heat autoscaling stack decide to
# delete an instance, the gnocchi metrics associated to this
# instance will be no more updated and when the alarm will ask
# for the aggregation, gnocchi will raise a 'No overlap'
# exception.
# So temporary set 'needed_overlap' to 0 to disable the
# gnocchi checks about missing points. For more detail see:
# https://bugs.launchpad.net/gnocchi/+bug/1479429
return self._gnocchi_client.metric.aggregation(
metrics=alarm.rule['metric'],
query=jsonutils.loads(alarm.rule['query']),
resource_type=alarm.rule["resource_type"],
start=start, stop=end,
aggregation=alarm.rule['aggregation_method'],
needed_overlap=0,
)
elif alarm.type == 'gnocchi_aggregation_by_metrics_threshold':
return self._gnocchi_client.metric.aggregation(
metrics=alarm.rule['metrics'],
start=start, stop=end,
aggregation=alarm.rule['aggregation_method'])
elif alarm.type == 'gnocchi_resources_threshold':
return self._gnocchi_client.metric.get_measures(
metric=alarm.rule['metric'],
start=start, stop=end,
resource_id=alarm.rule['resource_id'],
aggregation=alarm.rule['aggregation_method'])
except Exception:
LOG.exception(_('alarm stats retrieval failed'))
return []
if int(r.status_code / 100) != 2:
LOG.exception(_('alarm stats retrieval failed: %s') % r.text)
return []
else:
return jsonutils.loads(r.text)
@staticmethod
def _sanitize(alarm, statistics):
@@ -100,9 +85,10 @@ class GnocchiThresholdEvaluator(threshold.ThresholdEvaluator):
# we could potentially do a mean-of-means (or max-of-maxes or whatever,
# but not a stddev-of-stddevs).
# TODO(sileht): support alarm['exclude_outliers']
LOG.debug('sanitize stats %s', statistics)
LOG.error('sanitize (%s) stats %s', alarm.rule['granularity'],
statistics)
statistics = [stats[2] for stats in statistics
if stats[1] == alarm.rule['granularity']]
statistics = statistics[-alarm.rule['evaluation_periods']:]
LOG.debug('pruned statistics to %d', len(statistics))
LOG.error('pruned statistics to %d', len(statistics))
return statistics

View File

@@ -18,13 +18,12 @@ import datetime
import os
import uuid
from gnocchiclient import exceptions
import mock
import oslo_messaging.conffixture
from oslo_serialization import jsonutils
import requests
import six
from six import moves
import six.moves.urllib.parse as urlparse
from aodh import messaging
from aodh.storage import models
@@ -2846,8 +2845,7 @@ class TestAlarmsRuleGnocchi(TestAlarmsBase):
for r in data
if 'gnocchi_resources_threshold_rule' in r))
@mock.patch('aodh.keystone_client.get_client')
def test_post_gnocchi_resources_alarm(self, __):
def test_post_gnocchi_resources_alarm(self):
json = {
'enabled': False,
'name': 'name_post',
@@ -2870,53 +2868,44 @@ class TestAlarmsRuleGnocchi(TestAlarmsBase):
}
}
with mock.patch('requests.get',
side_effect=requests.ConnectionError()):
with mock.patch('aodh.api.controllers.v2.alarm_rules.'
'gnocchi.client') as clientlib:
c = clientlib.Client.return_value
c.capabilities.list.side_effect = Exception("boom!")
resp = self.post_json('/alarms', params=json,
headers=self.auth_headers,
expect_errors=True)
self.assertEqual(503, resp.status_code, resp.body)
with mock.patch('requests.get',
return_value=mock.Mock(status_code=500,
body="my_custom_error",
text="my_custom_error")):
with mock.patch('aodh.api.controllers.v2.alarm_rules.'
'gnocchi.client') as clientlib:
c = clientlib.Client.return_value
c.capabilities.list.side_effect = (
exceptions.ClientException(500, "my_custom_error"))
resp = self.post_json('/alarms', params=json,
headers=self.auth_headers,
expect_errors=True)
self.assertEqual(503, resp.status_code, resp.body)
self.assertEqual(500, resp.status_code, resp.body)
self.assertIn('my_custom_error',
resp.json['error_message']['faultstring'])
cap_result = mock.Mock(status_code=201,
text=jsonutils.dumps(
{'aggregation_methods': ['count']}))
resource_result = mock.Mock(status_code=200, text="blob")
with mock.patch('requests.get', side_effect=[cap_result,
resource_result]
) as gnocchi_get:
with mock.patch('aodh.api.controllers.v2.alarm_rules.'
'gnocchi.client') as clientlib:
c = clientlib.Client.return_value
c.capabilities.list.return_value = {
'aggregation_methods': ['count']}
self.post_json('/alarms', params=json, headers=self.auth_headers)
gnocchi_url = self.CONF.gnocchi_url
capabilities_url = urlparse.urljoin(gnocchi_url,
'/v1/capabilities')
resource_url = urlparse.urljoin(
gnocchi_url,
'/v1/resource/instance/209ef69c-c10c-4efb-90ff-46f4b2d90d2e'
)
expected = [mock.call(capabilities_url,
headers=mock.ANY),
mock.call(resource_url,
headers=mock.ANY)]
self.assertEqual(expected, gnocchi_get.mock_calls)
expected = [mock.call.capabilities.list(),
mock.call.resource.get(
"instance",
"209ef69c-c10c-4efb-90ff-46f4b2d90d2e")]
self.assertEqual(expected, c.mock_calls)
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
self._verify_alarm(json, alarms[0])
@mock.patch('aodh.keystone_client.get_client')
def test_post_gnocchi_metrics_alarm(self, __):
def test_post_gnocchi_metrics_alarm(self):
json = {
'enabled': False,
'name': 'name_post',
@@ -2938,19 +2927,19 @@ class TestAlarmsRuleGnocchi(TestAlarmsBase):
}
}
cap_result = mock.Mock(status_code=200,
text=jsonutils.dumps(
{'aggregation_methods': ['count']}))
with mock.patch('requests.get', return_value=cap_result):
with mock.patch('aodh.api.controllers.v2.alarm_rules.'
'gnocchi.client') as clientlib:
c = clientlib.Client.return_value
c.capabilities.list.return_value = {
'aggregation_methods': ['count']}
self.post_json('/alarms', params=json, headers=self.auth_headers)
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
self._verify_alarm(json, alarms[0])
@mock.patch('aodh.keystone_client.get_client')
def test_post_gnocchi_aggregation_alarm_project_constraint(self, __):
self.CONF.set_override('gnocchi_url', 'http://localhost:8041')
def test_post_gnocchi_aggregation_alarm_project_constraint(self):
json = {
'enabled': False,
'name': 'project_constraint',
@@ -2973,39 +2962,31 @@ class TestAlarmsRuleGnocchi(TestAlarmsBase):
}
}
cap_result = mock.Mock(status_code=201,
text=jsonutils.dumps(
{'aggregation_methods': ['count']}))
resource_result = mock.Mock(status_code=200, text="blob")
query_check_result = mock.Mock(status_code=200, text="blob")
expected_query = {"and": [{"=": {"created_by_project_id":
self.auth_headers['X-Project-Id']}},
{"=": {"server_group":
"my_autoscaling_group"}}]}
expected_query = ('{"and": [{"=": {"created_by_project_id": "%s"}}, '
'{"=": {"server_group": "my_autoscaling_group"}}]}' %
self.auth_headers['X-Project-Id'])
with mock.patch('aodh.api.controllers.v2.alarm_rules.'
'gnocchi.client') as clientlib:
c = clientlib.Client.return_value
c.capabilities.list.return_value = {
'aggregation_methods': ['count']}
self.post_json('/alarms', params=json, headers=self.auth_headers)
with mock.patch('requests.get',
side_effect=[cap_result, resource_result]):
with mock.patch('requests.post',
side_effect=[query_check_result]) as fake_post:
self.post_json('/alarms', params=json,
headers=self.auth_headers)
self.assertEqual([mock.call(
url=('http://localhost:8041/v1/aggregation/'
'resource/instance/metric/ameter'),
headers={'Content-Type': 'application/json',
'X-Auth-Token': mock.ANY},
params={'aggregation': 'count',
'needed_overlap': 0},
data=expected_query)],
fake_post.mock_calls),
self.assertEqual([mock.call(
aggregation='count',
metrics='ameter',
needed_overlap=0,
query=expected_query,
resource_type="instance")],
c.metric.aggregation.mock_calls),
alarms = list(self.alarm_conn.get_alarms(enabled=False))
self.assertEqual(1, len(alarms))
json['gnocchi_aggregation_by_resources_threshold_rule']['query'] = (
expected_query)
jsonutils.dumps(expected_query))
self._verify_alarm(json, alarms[0])

View File

@@ -17,8 +17,8 @@ import datetime
import unittest
import uuid
from gnocchiclient import exceptions
import mock
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslotest import mockpatch
import pytz
@@ -31,25 +31,15 @@ from aodh.tests import constants
from aodh.tests.unit.evaluator import base
class FakeResponse(object):
def __init__(self, code, data):
if code == 200:
self.values = [d[2] for d in data]
else:
self.values = []
self.text = jsonutils.dumps(data)
self.status_code = code
class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
EVALUATOR = gnocchi.GnocchiThresholdEvaluator
def setUp(self):
self.client = self.useFixture(mockpatch.Patch(
'aodh.evaluator.gnocchi.client'
)).mock.Client.return_value
super(TestGnocchiThresholdEvaluate, self).setUp()
self.requests = self.useFixture(mockpatch.Patch(
'aodh.evaluator.gnocchi.requests')).mock
def prepare_alarms(self):
self.alarms = [
models.Alarm(name='instance_running_hot',
@@ -133,9 +123,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
@staticmethod
def _get_stats(granularity, values):
now = timeutils.utcnow_ts()
return FakeResponse(
200, [[six.text_type(now - len(values) * granularity),
granularity, value] for value in values])
return [[six.text_type(now - len(values) * granularity),
granularity, value] for value in values]
@staticmethod
def _reason_data(disposition, count, most_recent):
@@ -153,11 +142,13 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(4)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] - v
for v in moves.xrange(6)])
self.requests.get.side_effect = [Exception('boom'),
FakeResponse(500, "error"),
means,
maxs]
self.requests.post.side_effect = [FakeResponse(500, "error"), avgs2]
self.client.metric.get_measures.side_effect = [
exceptions.ClientException(501, "error2"),
means]
self.client.metric.aggregation.side_effect = [
Exception('boom'),
exceptions.ClientException(500, "error"),
maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('insufficient data')
self._evaluate_all_alarms()
@@ -165,8 +156,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
def test_simple_insufficient(self):
self._set_all_alarms('ok')
self.requests.get.return_value = FakeResponse(200, [])
self.requests.post.return_value = FakeResponse(200, [])
self.client.metric.get_measures.return_value = []
self.client.metric.aggregation.return_value = []
self._evaluate_all_alarms()
self._assert_all_alarms('insufficient data')
expected = [mock.call(alarm) for alarm in self.alarms]
@@ -194,58 +185,46 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(1, 7)])
self.requests.get.side_effect = [avgs, maxs]
self.requests.post.side_effect = [avgs2]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
expected_headers = {'X-Auth-Token': mock.ANY,
'Content-Type': 'application/json'}
start_alarm1 = "2015-01-26T12:51:00"
start_alarm2 = "2015-01-26T12:32:00"
start_alarm3 = "2015-01-26T12:51:10"
end = "2015-01-26T12:57:00"
self.assertEqual([
mock.call(url='http://localhost:8041/v1/resource/instance/'
'my_instance/metric/cpu_util/measures',
params={'aggregation': 'mean',
'start': start_alarm1, 'end': end},
headers=expected_headers),
mock.call(url='http://localhost:8041/v1/aggregation/metric',
params={'aggregation': 'max',
'start': start_alarm2, 'end': end,
'metric': [
'0bb1604d-1193-4c0a-b4b8-74b170e35e83',
'9ddc209f-42f8-41e1-b8f1-8804f59c4053']},
headers=expected_headers)],
self.requests.get.mock_calls)
self.assertEqual([
mock.call(url='http://localhost:8041/v1/aggregation/resource/'
'instance/metric/cpu_util',
params={'aggregation': 'mean',
'start': start_alarm3, 'end': end,
'needed_overlap': 0},
data='{"=": {"server_group": "my_autoscaling_group"}}',
headers=expected_headers),
],
self.requests.post.mock_calls)
mock.call.get_measures(aggregation='mean', metric='cpu_util',
resource_id='my_instance',
start=start_alarm1, stop=end),
mock.call.aggregation(aggregation='max',
metrics=[
'0bb1604d-1193-4c0a-b4b8-74b170e35e83',
'9ddc209f-42f8-41e1-b8f1-8804f59c4053'],
start=start_alarm2, stop=end),
mock.call.aggregation(aggregation='mean', metrics='cpu_util',
needed_overlap=0,
query={"=": {"server_group":
"my_autoscaling_group"}},
resource_type='instance',
start=start_alarm3, stop=end),
], self.client.metric.mock_calls)
self._assert_all_alarms('alarm')
expected = [mock.call(alarm) for alarm in self.alarms]
update_calls = self.storage_conn.update_alarm.call_args_list
self.assertEqual(expected, update_calls)
reasons = ['Transition to alarm due to 5 samples outside'
' threshold, most recent: %s' % avgs.values[-1],
' threshold, most recent: %s' % avgs[-1][2],
'Transition to alarm due to 4 samples outside'
' threshold, most recent: %s' % maxs.values[-1],
' threshold, most recent: %s' % maxs[-1][2],
'Transition to alarm due to 6 samples outside'
' threshold, most recent: %s' % avgs2.values[-1],
' threshold, most recent: %s' % avgs2[-1][2],
]
reason_datas = [self._reason_data('outside', 5, avgs.values[-1]),
self._reason_data('outside', 4, maxs.values[-1]),
self._reason_data('outside', 6, avgs2.values[-1])]
reason_datas = [self._reason_data('outside', 5, avgs[-1][2]),
self._reason_data('outside', 4, maxs[-1][2]),
self._reason_data('outside', 6, avgs2[-1][2])]
expected = [mock.call(alarm, 'ok', reason, reason_data)
for alarm, reason, reason_data
in zip(self.alarms, reasons, reason_datas)]
@@ -259,22 +238,22 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(1, 5)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] - v
for v in moves.xrange(6)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('ok')
expected = [mock.call(alarm) for alarm in self.alarms]
update_calls = self.storage_conn.update_alarm.call_args_list
self.assertEqual(expected, update_calls)
reasons = ['Transition to ok due to 5 samples inside'
' threshold, most recent: %s' % avgs.values[-1],
' threshold, most recent: %s' % avgs[-1][2],
'Transition to ok due to 4 samples inside'
' threshold, most recent: %s' % maxs.values[-1],
' threshold, most recent: %s' % maxs[-1][2],
'Transition to ok due to 6 samples inside'
' threshold, most recent: %s' % avgs2.values[-1]]
reason_datas = [self._reason_data('inside', 5, avgs.values[-1]),
self._reason_data('inside', 4, maxs.values[-1]),
self._reason_data('inside', 6, avgs2.values[-1])]
' threshold, most recent: %s' % avgs2[-1][2]]
reason_datas = [self._reason_data('inside', 5, avgs[-1][2]),
self._reason_data('inside', 4, maxs[-1][2]),
self._reason_data('inside', 6, avgs2[-1][2])]
expected = [mock.call(alarm, 'alarm', reason, reason_data)
for alarm, reason, reason_data
in zip(self.alarms, reasons, reason_datas)]
@@ -288,8 +267,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(-1, 3)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(6)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('ok')
self.assertEqual(
@@ -304,7 +283,7 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
# the test if the evaluator doesn't remove it.
maxs = self._get_stats(300, [self.alarms[0].rule['threshold'] - v
for v in moves.xrange(-1, 5)])
self.requests.get.side_effect = [maxs]
self.client.metric.aggregation.side_effect = [maxs]
self._evaluate_all_alarms()
self._assert_all_alarms('alarm')
@@ -317,8 +296,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(-1, 3)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(6)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('ok')
self.assertEqual([], self.storage_conn.update_alarm.call_args_list)
@@ -337,8 +316,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(4)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(6)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('alarm')
self.assertEqual([], self.storage_conn.update_alarm.call_args_list)
@@ -359,22 +338,22 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(4)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(1, 7)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('alarm')
expected = [mock.call(alarm) for alarm in self.alarms]
update_calls = self.storage_conn.update_alarm.call_args_list
self.assertEqual(expected, update_calls)
reasons = ['Transition to alarm due to 5 samples outside'
' threshold, most recent: %s' % avgs.values[-1],
' threshold, most recent: %s' % avgs[-1][2],
'Transition to alarm due to 4 samples outside'
' threshold, most recent: %s' % maxs.values[-1],
' threshold, most recent: %s' % maxs[-1][2],
'Transition to alarm due to 6 samples outside'
' threshold, most recent: %s' % avgs2.values[-1]]
reason_datas = [self._reason_data('outside', 5, avgs.values[-1]),
self._reason_data('outside', 4, maxs.values[-1]),
self._reason_data('outside', 6, avgs2.values[-1])]
' threshold, most recent: %s' % avgs2[-1][2]]
reason_datas = [self._reason_data('outside', 5, avgs[-1][2]),
self._reason_data('outside', 4, maxs[-1][2]),
self._reason_data('outside', 6, avgs2[-1][2])]
expected = [mock.call(alarm, 'ok', reason, reason_data)
for alarm, reason, reason_data
in zip(self.alarms, reasons, reason_datas)]
@@ -388,22 +367,22 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
for v in moves.xrange(4)])
avgs2 = self._get_stats(50, [self.alarms[2].rule['threshold'] + v
for v in moves.xrange(1, 7)])
self.requests.post.side_effect = [avgs2]
self.requests.get.side_effect = [avgs, maxs]
self.client.metric.get_measures.side_effect = [avgs]
self.client.metric.aggregation.side_effect = [maxs, avgs2]
self._evaluate_all_alarms()
self._assert_all_alarms('alarm')
expected = [mock.call(alarm) for alarm in self.alarms]
update_calls = self.storage_conn.update_alarm.call_args_list
self.assertEqual(expected, update_calls)
reasons = ['Transition to alarm due to 5 samples outside'
' threshold, most recent: %s' % avgs.values[-1],
' threshold, most recent: %s' % avgs[-1][2],
'Transition to alarm due to 4 samples outside'
' threshold, most recent: %s' % maxs.values[-1],
' threshold, most recent: %s' % maxs[-1][2],
'Transition to alarm due to 6 samples outside'
' threshold, most recent: %s' % avgs2.values[-1]]
reason_datas = [self._reason_data('outside', 5, avgs.values[-1]),
self._reason_data('outside', 4, maxs.values[-1]),
self._reason_data('outside', 6, avgs2.values[-1])]
' threshold, most recent: %s' % avgs2[-1][2]]
reason_datas = [self._reason_data('outside', 5, avgs[-1][2]),
self._reason_data('outside', 4, maxs[-1][2]),
self._reason_data('outside', 6, avgs2[-1][2])]
expected = [mock.call(alarm, 'insufficient data',
reason, reason_data)
for alarm, reason, reason_data
@@ -427,7 +406,8 @@ class TestGnocchiThresholdEvaluate(base.TestEvaluatorBase):
dt = datetime.datetime(2014, 1, 1, 15, 0, 0,
tzinfo=pytz.timezone('Europe/Ljubljana'))
mock_utcnow.return_value = dt.astimezone(pytz.UTC)
self.requests.get.return_value = []
self.client.metric.get_measures.return_value = []
self.client.metric.aggregation.return_value = []
self._evaluate_all_alarms()
self._assert_all_alarms('ok')
update_calls = self.storage_conn.update_alarm.call_args_list

View File

@@ -6,6 +6,7 @@ retrying!=1.3.0,>=1.2.3 # Apache-2.0
croniter>=0.3.4 # MIT License
jsonschema!=2.5.0,<3.0.0,>=2.0.0
keystonemiddleware>=2.2.0
gnocchiclient>=2.1.0 # Apache-2.0
lxml>=2.3
oslo.context>=0.2.0 # Apache-2.0
oslo.db>=1.12.0 # Apache-2.0