Merge "Add gnocchi alarm rules"

This commit is contained in:
Jenkins
2016-02-16 16:40:25 +00:00
committed by Gerrit Code Review
5 changed files with 583 additions and 22 deletions

View File

@@ -12,6 +12,7 @@
import uuid
import six
from tempest_lib import exceptions
from aodhclient.tests.functional import base
@@ -212,3 +213,389 @@ class AodhClientTest(base.ClientTestBase):
result = self.aodh('alarm', params="list --type threshold")
self.assertNotIn(ALARM_ID,
[r['alarm_id'] for r in self.parser.listing(result)])
class AodhClientGnocchiRulesTest(base.ClientTestBase):
    """Functional CRUD scenarios for the Gnocchi-backed alarm rule types.

    Each test drives the ``aodh`` CLI through a full alarm lifecycle for
    one gnocchi alarm type: create, duplicate-create failure (HTTP 409),
    create with a missing required parameter (CommandFailed), update,
    show, list, search, delete, and the post-delete failure paths
    (HTTP 404, absence from listings).
    """

    def test_gnocchi_resources_threshold_scenario(self):
        """Lifecycle of a ``gnocchi_resources_threshold`` alarm.

        Targets a single pre-provisioned Gnocchi resource (created by the
        test environment setup script) identified by RESOURCE_ID.
        """
        PROJECT_ID = str(uuid.uuid4())
        # NOTE(gordc): taken from setup-tests.sh
        RESOURCE_ID = '6868DA77-FA82-4E67-ABA9-270C5AE8CBCA'
        # CREATE
        result = self.aodh(u'alarm',
                           params=(u"create "
                                   "--type gnocchi_resources_threshold "
                                   "--name alarm1 --metric cpu_util "
                                   "--threshold 80 "
                                   "--resource-id %s --resource-type instance "
                                   "--aggregation-method last "
                                   "--project-id %s"
                                   % (RESOURCE_ID, PROJECT_ID)))
        alarm = self.details_multiple(result)[0]
        ALARM_ID = alarm['alarm_id']
        self.assertEqual('alarm1', alarm['name'])
        self.assertEqual('cpu_util', alarm['metric'])
        # CLI renders the float threshold as a string.
        self.assertEqual('80.0', alarm['threshold'])
        self.assertEqual('last', alarm['aggregation_method'])
        self.assertEqual('6868DA77-FA82-4E67-ABA9-270C5AE8CBCA',
                         alarm['resource_id'])
        self.assertEqual('instance', alarm['resource_type'])
        # CREATE FAIL: identical create must conflict with the first alarm.
        result = self.aodh(u'alarm',
                           params=(u"create "
                                   "--type gnocchi_resources_threshold "
                                   "--name alarm1 --metric cpu_util "
                                   "--threshold 80 "
                                   "--resource-id %s --resource-type instance "
                                   "--aggregation-method last "
                                   "--project-id %s"
                                   % (RESOURCE_ID, PROJECT_ID)),
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       'Conflict (HTTP 409)')
        # CREATE FAIL MISSING PARAM: --threshold omitted on purpose.
        self.assertRaises(exceptions.CommandFailed,
                          self.aodh, u'alarm',
                          params=(u"create "
                                  "--type gnocchi_resources_threshold "
                                  "--name alarm1 --metric cpu_util "
                                  "--resource-id %s --resource-type instance "
                                  "--aggregation-method last "
                                  "--project-id %s"
                                  % (RESOURCE_ID, PROJECT_ID)))
        # UPDATE
        result = self.aodh(
            'alarm', params=("update %s --severity critical --threshold 90"
                             % ALARM_ID))
        alarm_updated = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_updated["alarm_id"])
        self.assertEqual('critical', alarm_updated['severity'])
        self.assertEqual('90.0', alarm_updated["threshold"])
        # GET: show must reflect the updated threshold/severity.
        result = self.aodh(
            'alarm', params="show %s" % ALARM_ID)
        alarm_show = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_show["alarm_id"])
        self.assertEqual(PROJECT_ID, alarm_show["project_id"])
        self.assertEqual('alarm1', alarm_show['name'])
        self.assertEqual('cpu_util', alarm_show['metric'])
        self.assertEqual('90.0', alarm_show['threshold'])
        self.assertEqual('critical', alarm_show['severity'])
        self.assertEqual('last', alarm_show['aggregation_method'])
        self.assertEqual('instance', alarm_show['resource_type'])
        # LIST
        result = self.aodh(
            'alarm', params="list --type gnocchi_resources_threshold")
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH ALL
        result = self.aodh(
            'alarm', params=("search --type gnocchi_resources_threshold"))
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH SOME: filter by the random project id, so only our alarm
        # can match.
        result = self.aodh('alarm',
                           params=("search --type gnocchi_resources_threshold "
                                   "--query "
                                   "'{\"=\": {\"project_id\": \"%s\"}}'"
                                   % PROJECT_ID))
        alarm_list = self.parser.listing(result)[0]
        self.assertEqual(ALARM_ID, alarm_list["alarm_id"])
        self.assertEqual('alarm1', alarm_list['name'])
        # DELETE: successful delete produces no output.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID)
        self.assertEqual("", result)
        # GET FAIL
        result = self.aodh('alarm', params="show %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # DELETE FAIL: deleting twice must 404.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # LIST DOES NOT HAVE ALARM
        result = self.aodh('alarm',
                           params="list --type gnocchi_resources_threshold")
        self.assertNotIn(ALARM_ID,
                         [r['alarm_id'] for r in self.parser.listing(result)])

    def test_gnocchi_aggr_by_resources_scenario(self):
        """Lifecycle of a ``gnocchi_aggregation_by_resources_threshold`` alarm.

        Instead of a single resource id, the rule selects resources with a
        JSON query on resource attributes (``server_group``).
        """
        PROJECT_ID = str(uuid.uuid4())
        # CREATE
        result = self.aodh(
            u'alarm',
            params=(u"create "
                    "--type "
                    "gnocchi_aggregation_by_resources_threshold "
                    "--name alarm1 --metric cpu --threshold 80 "
                    "--query "
                    "'{\"=\": {\"server_group\": \"my_group\"}}' "
                    "--resource-type instance "
                    "--aggregation-method last "
                    "--project-id %s" % PROJECT_ID))
        alarm = self.details_multiple(result)[0]
        ALARM_ID = alarm['alarm_id']
        self.assertEqual('alarm1', alarm['name'])
        self.assertEqual('cpu', alarm['metric'])
        self.assertEqual('80.0', alarm['threshold'])
        self.assertEqual('last', alarm['aggregation_method'])
        self.assertEqual('instance', alarm['resource_type'])
        # The stored query must round-trip unchanged.
        self.assertEqual('{"=": {"server_group": "my_group"}}',
                         alarm['query'])
        # CREATE FAIL: identical create must conflict.
        result = self.aodh(
            u'alarm',
            params=(u"create "
                    "--type "
                    "gnocchi_aggregation_by_resources_threshold "
                    "--name alarm1 --metric cpu --threshold 80 "
                    "--query "
                    "'{\"=\": {\"server_group\": \"my_group\"}}' "
                    "--resource-type instance "
                    "--aggregation-method last "
                    "--project-id %s" % PROJECT_ID),
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       'Conflict (HTTP 409)')
        # CREATE FAIL MISSING PARAM: --threshold omitted on purpose.
        self.assertRaises(
            exceptions.CommandFailed,
            self.aodh, u'alarm',
            params=(u"create "
                    "--type "
                    "gnocchi_aggregation_by_resources_threshold "
                    "--name alarm1 --metric cpu "
                    "--query "
                    "'{\"=\": {\"server_group\": \"my_group\"}}' "
                    "--resource-type instance "
                    "--aggregation-method last "
                    "--project-id %s" % PROJECT_ID))
        # UPDATE
        result = self.aodh(
            'alarm', params=("update %s --severity critical --threshold 90"
                             % ALARM_ID))
        alarm_updated = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_updated["alarm_id"])
        self.assertEqual('critical', alarm_updated['severity'])
        self.assertEqual('90.0', alarm_updated["threshold"])
        # GET: show must reflect the updated threshold/severity.
        result = self.aodh(
            'alarm', params="show %s" % ALARM_ID)
        alarm_show = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_show["alarm_id"])
        self.assertEqual(PROJECT_ID, alarm_show["project_id"])
        self.assertEqual('alarm1', alarm_show['name'])
        self.assertEqual('cpu', alarm_show['metric'])
        self.assertEqual('90.0', alarm_show['threshold'])
        self.assertEqual('critical', alarm_show['severity'])
        self.assertEqual('last', alarm_show['aggregation_method'])
        self.assertEqual('instance', alarm_show['resource_type'])
        # LIST
        result = self.aodh(
            'alarm', params="list --type "
            "gnocchi_aggregation_by_resources_threshold")
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH ALL
        result = self.aodh(
            'alarm', params=("search --type "
                             "gnocchi_aggregation_by_resources_threshold"))
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH SOME: filter by the random project id.
        result = self.aodh(
            'alarm', params=("search --type "
                             "gnocchi_aggregation_by_resources_threshold "
                             "--query '{\"=\": {\"project_id\": \"%s\"}}'"
                             % PROJECT_ID))
        alarm_list = self.parser.listing(result)[0]
        self.assertEqual(ALARM_ID, alarm_list["alarm_id"])
        self.assertEqual('alarm1', alarm_list['name'])
        # DELETE: successful delete produces no output.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID)
        self.assertEqual("", result)
        # GET FAIL
        result = self.aodh('alarm', params="show %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # DELETE FAIL: deleting twice must 404.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # LIST DOES NOT HAVE ALARM
        result = self.aodh('alarm', params="list --type "
                           "gnocchi_aggregation_by_resources_threshold")
        self.assertNotIn(ALARM_ID,
                         [r['alarm_id'] for r in self.parser.listing(result)])

    def test_gnocchi_aggr_by_metrics_scenario(self):
        """Lifecycle of a ``gnocchi_aggregation_by_metrics_threshold`` alarm.

        The rule aggregates over an explicit list of metric ids, passed as
        repeated ``--metrics`` options.
        """
        PROJECT_ID = str(uuid.uuid4())
        METRIC1 = 'cpu'
        METRIC2 = 'cpu_util'
        # CREATE
        result = self.aodh(
            u'alarm',
            params=(u"create "
                    "--type gnocchi_aggregation_by_metrics_threshold "
                    "--name alarm1 "
                    "--metrics %s "
                    "--metrics %s "
                    "--threshold 80 "
                    "--aggregation-method last "
                    "--project-id %s"
                    % (METRIC1, METRIC2, PROJECT_ID)))
        alarm = self.details_multiple(result)[0]
        ALARM_ID = alarm['alarm_id']
        self.assertEqual('alarm1', alarm['name'])
        # The metrics list is rendered as a Python repr, which differs
        # between py2 (u-prefixed strings) and py3.
        metrics = "[u'cpu', u'cpu_util']" if six.PY2 else "['cpu', 'cpu_util']"
        self.assertEqual(metrics, alarm['metrics'])
        self.assertEqual('80.0', alarm['threshold'])
        self.assertEqual('last', alarm['aggregation_method'])
        # CREATE FAIL: identical create must conflict.
        result = self.aodh(
            u'alarm',
            params=(u"create "
                    "--type gnocchi_aggregation_by_metrics_threshold "
                    "--name alarm1 "
                    "--metrics %s "
                    "--metrics %s "
                    "--threshold 80 "
                    "--aggregation-method last "
                    "--project-id %s"
                    % (METRIC1, METRIC2, PROJECT_ID)),
            fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       'Conflict (HTTP 409)')
        # CREATE FAIL MISSING PARAM: --threshold omitted on purpose.
        self.assertRaises(
            exceptions.CommandFailed,
            self.aodh, u'alarm',
            params=(u"create "
                    "--type gnocchi_aggregation_by_metrics_threshold "
                    "--name alarm1 "
                    "--metrics %s "
                    "--metrics %s "
                    "--aggregation-method last "
                    "--project-id %s"
                    % (METRIC1, METRIC2, PROJECT_ID)))
        # UPDATE
        result = self.aodh(
            'alarm', params=("update %s --severity critical --threshold 90"
                             % ALARM_ID))
        alarm_updated = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_updated["alarm_id"])
        self.assertEqual('critical', alarm_updated['severity'])
        self.assertEqual('90.0', alarm_updated["threshold"])
        # GET: show must reflect the updated threshold/severity.
        result = self.aodh(
            'alarm', params="show %s" % ALARM_ID)
        alarm_show = self.details_multiple(result)[0]
        self.assertEqual(ALARM_ID, alarm_show["alarm_id"])
        self.assertEqual(PROJECT_ID, alarm_show["project_id"])
        self.assertEqual('alarm1', alarm_show['name'])
        self.assertEqual(metrics, alarm_show['metrics'])
        self.assertEqual('90.0', alarm_show['threshold'])
        self.assertEqual('critical', alarm_show['severity'])
        self.assertEqual('last', alarm_show['aggregation_method'])
        # LIST
        result = self.aodh(
            'alarm', params="list --type "
            "gnocchi_aggregation_by_metrics_threshold")
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH ALL
        result = self.aodh(
            'alarm', params=("search --type "
                             "gnocchi_aggregation_by_metrics_threshold"))
        self.assertIn(ALARM_ID,
                      [r['alarm_id'] for r in self.parser.listing(result)])
        for alarm_list in self.parser.listing(result):
            if alarm_list["alarm_id"] == ALARM_ID:
                self.assertEqual('alarm1', alarm_list['name'])
        # SEARCH SOME: filter by the random project id.
        result = self.aodh(
            'alarm',
            params=("search --type "
                    "gnocchi_aggregation_by_metrics_threshold "
                    "--query '{\"=\": {\"project_id\": \"%s\"}}'"
                    % PROJECT_ID))
        alarm_list = self.parser.listing(result)[0]
        self.assertEqual(ALARM_ID, alarm_list["alarm_id"])
        self.assertEqual('alarm1', alarm_list['name'])
        # DELETE: successful delete produces no output.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID)
        self.assertEqual("", result)
        # GET FAIL
        result = self.aodh('alarm', params="show %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # DELETE FAIL: deleting twice must 404.
        result = self.aodh('alarm', params="delete %s" % ALARM_ID,
                           fail_ok=True, merge_stderr=True)
        self.assertFirstLineStartsWith(result.split('\n'),
                                       "Not found (HTTP 404)")
        # LIST DOES NOT HAVE ALARM
        result = self.aodh(
            'alarm', params="list --type "
            "gnocchi_aggregation_by_metrics_threshold")
        self.assertNotIn(ALARM_ID,
                         [r['alarm_id'] for r in self.parser.listing(result)])

View File

@@ -68,6 +68,21 @@ class AlarmManager(base.Manager):
elif 'event_rule' in alarm_update:
alarm['event_rule'].update(alarm_update.get('event_rule'))
alarm_update.pop('event_rule')
elif 'gnocchi_resources_threshold_rule' in alarm_update:
alarm['gnocchi_resources_threshold_rule'].update(
alarm_update.get('gnocchi_resources_threshold_rule'))
alarm_update.pop('gnocchi_resources_threshold_rule')
elif 'gnocchi_aggregation_by_metrics_threshold_rule' in alarm_update:
alarm['gnocchi_aggregation_by_metrics_threshold_rule'].update(
alarm_update.get(
'gnocchi_aggregation_by_metrics_threshold_rule'))
alarm_update.pop('gnocchi_aggregation_by_metrics_threshold_rule')
elif 'gnocchi_aggregation_by_resources_threshold_rule' in alarm_update:
alarm['gnocchi_aggregation_by_resources_threshold_rule'].update(
alarm_update.get(
'gnocchi_aggregation_by_resources_threshold_rule'))
alarm_update.pop(
'gnocchi_aggregation_by_resources_threshold_rule')
alarm.update(alarm_update)
return self._put(

View File

@@ -17,7 +17,9 @@ from oslo_utils import strutils
from aodhclient import utils
ALARM_TYPES = ['threshold', 'event']
ALARM_TYPES = ['threshold', 'event', 'gnocchi_resources_threshold',
'gnocchi_aggregation_by_metrics_threshold',
'gnocchi_aggregation_by_resources_threshold']
ALARM_STATES = ['ok', 'alarm', 'insufficient data']
ALARM_SEVERITY = ['low', 'moderate', 'critical']
ALARM_OPERATORS = ['lt', 'le', 'eq', 'ne', 'ge', 'gt']
@@ -35,6 +37,12 @@ class CliAlarmList(lister.Lister):
cols.append('threshold_rule')
elif alarm_type == 'event':
cols.append('event_rule')
elif alarm_type == 'gnocchi_resources_threshold':
cols.append('gnocchi_resources_threshold_rule')
elif alarm_type == 'gnocchi_aggregation_by_metrics_threshold':
cols.append('gnocchi_aggregation_by_metrics_threshold_rule')
elif alarm_type == 'gnocchi_aggregation_by_resources_threshold':
cols.append('gnocchi_aggregation_by_resources_threshold_rule')
return cols
def get_parser(self, prog_name):
@@ -156,35 +164,66 @@ class CliAlarmCreate(show.ShowOne):
help='key[op]data_type::value; list. data_type is optional, '
'but if supplied must be string, integer, float, or boolean. '
'Used by threshold and event alarms')
common_group.add_argument(
'--comparison-operator', metavar='<OPERATOR>',
dest='comparison_operator', choices=ALARM_OPERATORS,
help='Operator to compare with, one of: ' + str(ALARM_OPERATORS))
common_group.add_argument(
'--evaluation-periods', type=int, metavar='<EVAL_PERIODS>',
dest='evaluation_periods',
help='Number of periods to evaluate over')
common_group.add_argument(
'--threshold', type=float, metavar='<THRESHOLD>',
dest='threshold', help='Threshold to evaluate against.')
common_group.add_argument(
'--metric', metavar='<METRIC>',
dest='metric', help='Metric to evaluate against.')
threshold_group = parser.add_argument_group('threshold alarm')
threshold_group.add_argument(
'-m', '--meter-name', metavar='<METRIC>',
dest='meter_name', help='Metric to evaluate against')
threshold_group.add_argument(
'--threshold', type=float, metavar='<THRESHOLD>',
dest='threshold', help='Threshold to evaluate against.')
threshold_group.add_argument(
'--period', type=int, metavar='<PERIOD>', dest='period',
help='Length of each period (seconds) to evaluate over.')
threshold_group.add_argument(
'--evaluation-periods', type=int, metavar='<EVAL_PERIODS>',
dest='evaluation_periods',
help='Number of periods to evaluate over')
threshold_group.add_argument(
'--statistic', metavar='<STATISTIC>', dest='statistic',
choices=STATISTICS,
help='Statistic to evaluate, one of: ' + str(STATISTICS))
threshold_group.add_argument(
'--comparison-operator', metavar='<OPERATOR>',
dest='comparison_operator', choices=ALARM_OPERATORS,
help='Operator to compare with, one of: ' + str(ALARM_OPERATORS))
event_group = parser.add_argument_group('event alarm')
event_group.add_argument(
'--event-type', metavar='<EVENT_TYPE>',
dest='event_type', help='Event type to evaluate against')
gnocchi_common_group = parser.add_argument_group(
'common gnocchi alarm rules')
gnocchi_common_group.add_argument(
'--granularity', metavar='<GRANULARITY>',
dest='granularity',
help='The time range in seconds over which to query.')
gnocchi_common_group.add_argument(
'--aggregation-method', metavar='<AGGR_METHOD>',
dest='aggregation_method',
help='The aggregation_method to compare to the threshold.')
gnocchi_resource_threshold_group = parser.add_argument_group(
'gnocchi resource threshold alarm')
gnocchi_resource_threshold_group.add_argument(
'--resource-type', metavar='<RESOURCE_TYPE>',
dest='resource_type', help='The type of resource.')
gnocchi_resource_threshold_group.add_argument(
'--resource-id', metavar='<RESOURCE_ID>',
dest='resource_id', help='The id of a resource.')
gnocchi_aggr_metrics_group = parser.add_argument_group(
'gnocchi aggregation by metrics alarm')
gnocchi_aggr_metrics_group.add_argument(
'--metrics', metavar='<METRICS>', action='append',
dest='metrics', help='The list of metric ids.')
self.parser = parser
return parser
@@ -193,6 +232,27 @@ class CliAlarmCreate(show.ShowOne):
not (parsed_args.meter_name and parsed_args.threshold)):
self.parser.error('threshold requires --meter-name and '
'--threshold')
elif (parsed_args.type == 'gnocchi_resources_threshold' and
not (parsed_args.metric and parsed_args.threshold and
parsed_args.resource_id and parsed_args.resource_type
and parsed_args.aggregation_method)):
self.parser.error('gnocchi_resources_threshold requires --metric, '
'--threshold, --resource-id, --resource-type '
'and --aggregation-method')
elif (parsed_args.type == 'gnocchi_aggregation_by_metrics_threshold'
and not (parsed_args.metrics and parsed_args.threshold and
parsed_args.aggregation_method)):
self.parser.error('gnocchi_aggregation_by_metrics_threshold '
'requires --metrics, --threshold and '
'--aggregation-method')
elif (parsed_args.type == 'gnocchi_aggregation_by_resources_threshold'
and not (parsed_args.metric and parsed_args.threshold and
parsed_args.query and parsed_args.resource_type and
parsed_args.aggregation_method)):
self.parser.error('gnocchi_aggregation_by_resources_threshold '
'requires --metric, --threshold, '
'--aggregation-method, --query and '
'--resource_type')
def _alarm_from_args(self, parsed_args):
alarm = utils.dict_from_parsed_args(
@@ -206,6 +266,24 @@ class CliAlarmCreate(show.ShowOne):
'query'])
alarm['event_rule'] = utils.dict_from_parsed_args(
parsed_args, ['event_type', 'query'])
alarm['gnocchi_resources_threshold_rule'] = (
utils.dict_from_parsed_args(parsed_args,
['granularity', 'comparison_operator',
'threshold', 'aggregation_method',
'evaluation_periods', 'metric',
'resource_id', 'resource_type']))
alarm['gnocchi_aggregation_by_metrics_threshold_rule'] = (
utils.dict_from_parsed_args(parsed_args,
['granularity', 'comparison_operator',
'threshold', 'aggregation_method',
'evaluation_periods', 'metrics']))
alarm['gnocchi_aggregation_by_resources_threshold_rule'] = (
utils.dict_from_parsed_args(parsed_args,
['granularity', 'comparison_operator',
'threshold', 'aggregation_method',
'evaluation_periods', 'metric',
'query', 'resource_type']))
if self.create:
alarm['type'] = parsed_args.type
self._validate_args(parsed_args)

View File

@@ -18,8 +18,9 @@ clean_exit () {
}
AODH_DATA=`mktemp -d /tmp/aodh-data-XXXXX`
GNOCCHI_DATA=`mktemp -d /tmp/gnocchi-data-XXXXX`
MYSQL_DATA=`mktemp -d /tmp/aodh-mysql-XXXXX`
trap "clean_exit \"$AODH_DATA\" \"$MYSQL_DATA\"" EXIT
trap "clean_exit \"$AODH_DATA\" \"$GNOCCHI_DATA\" \"$MYSQL_DATA\"" EXIT
mysqld --initialize-insecure --datadir=${MYSQL_DATA} || true
mkfifo ${MYSQL_DATA}/out
@@ -28,29 +29,108 @@ mysqld --no-defaults --datadir=${MYSQL_DATA} --pid-file=${MYSQL_DATA}/mysql.pid
# Wait for MySQL to start listening to connections
wait_for_line "mysqld: ready for connections." ${MYSQL_DATA}/out
export AODH_TEST_STORAGE_URL="mysql+pymysql://root@localhost/test?unix_socket=${MYSQL_DATA}/mysql.socket&charset=utf8"
mysql --no-defaults -S ${MYSQL_DATA}/mysql.socket -e 'CREATE DATABASE test;'
export GNOCCHI_TEST_INDEXER_URL="mysql+pymysql://root@localhost/gnocchi?unix_socket=${MYSQL_DATA}/mysql.socket&charset=utf8"
mysql --no-defaults -S ${MYSQL_DATA}/mysql.socket -e 'CREATE DATABASE test; CREATE DATABASE gnocchi;'
# NOTE(sileht): FIXME: we must use the upstream policy and paste
# configuration and not a copy, but aodh doesn't yet install
# etc files in virtualenv
cat << EOF > ${AODH_DATA}/policy.json
{
"context_is_admin": "role:admin",
"segregation": "rule:context_is_admin",
"admin_or_owner": "rule:context_is_admin or project_id:%(project_id)s",
"default": "rule:admin_or_owner",
"telemetry:get_alarm": "rule:admin_or_owner",
"telemetry:get_alarms": "rule:admin_or_owner",
"telemetry:query_alarm": "rule:admin_or_owner",
"telemetry:create_alarm": "",
"telemetry:change_alarm": "rule:admin_or_owner",
"telemetry:delete_alarm": "rule:admin_or_owner",
"telemetry:get_alarm_state": "rule:admin_or_owner",
"telemetry:change_alarm_state": "rule:admin_or_owner",
"telemetry:alarm_history": "rule:admin_or_owner",
"telemetry:query_alarm_history": "rule:admin_or_owner"
}
EOF
cat << EOF > ${AODH_DATA}/api-paste.ini
[pipeline:main]
# NOTE(sileht): disable authtoken
# pipeline = request_id authtoken api-server
pipeline = request_id api-server
[app:api-server]
paste.app_factory = aodh.api.app:app_factory
[filter:authtoken]
paste.filter_factory = keystonemiddleware.auth_token:filter_factory
oslo_config_project = aodh
[filter:request_id]
paste.filter_factory = oslo_middleware:RequestId.factory
EOF
mkfifo ${AODH_DATA}/out
echo '{"default": ""}' > ${AODH_DATA}/policy.json
cat > ${AODH_DATA}/aodh.conf <<EOF
[api]
paste_config = ${AODH_DATA}/api-paste.ini
# paste_config = ${VIRTUAL_ENV}/etc/aodh/api-paste.ini
[oslo_policy]
policy_file = ${AODH_DATA}/policy.json
# policy_file = ${VIRTUAL_ENV}/etc/aodh/policy.json
[database]
connection = mysql+pymysql://root@localhost/test?unix_socket=${MYSQL_DATA}/mysql.socket&charset=utf8
EOF
cat <<EOF > ${AODH_DATA}/api-paste.ini
[pipeline:main]
pipeline = aodh
[app:aodh]
paste.app_factory = aodh.api.app:app_factory
connection = $AODH_TEST_STORAGE_URL
[service_credentials]
auth_type = gnocchi-noauth
user_id = e0f4a978-694f-4ad3-b93d-8959374ab091
project_id = e0f4a978-694f-4ad3-b93d-8959374ab091
roles = admin
endpoint = http://localhost:8041/
EOF
aodh-dbsync --config-file ${AODH_DATA}/aodh.conf
aodh-api --config-file ${AODH_DATA}/aodh.conf &> ${AODH_DATA}/out &
# Wait for Aodh to start
wait_for_line "Running on http://0.0.0.0:8042/" ${AODH_DATA}/out
export AODH_ENDPOINT=http://localhost:8042/
mkfifo ${GNOCCHI_DATA}/out
cat > ${GNOCCHI_DATA}/gnocchi.conf <<EOF
[oslo_policy]
policy_file = ${VIRTUAL_ENV}/etc/gnocchi/policy.json
[api]
paste_config = ${VIRTUAL_ENV}/etc/gnocchi/api-paste.ini
[storage]
metric_processing_delay = 1
file_basepath = ${GNOCCHI_DATA}
driver = file
coordination_url = file://${GNOCCHI_DATA}
[indexer]
url = $GNOCCHI_TEST_INDEXER_URL
EOF
gnocchi-upgrade --config-file ${GNOCCHI_DATA}/gnocchi.conf
gnocchi-metricd --config-file ${GNOCCHI_DATA}/gnocchi.conf &>/dev/null &
gnocchi-api --config-file ${GNOCCHI_DATA}/gnocchi.conf &> ${GNOCCHI_DATA}/out &
# Wait for Gnocchi to start
wait_for_line "Running on http://0.0.0.0:8041/" ${GNOCCHI_DATA}/out
export GNOCCHI_ENDPOINT=http://localhost:8041/
# gnocchi alarms validate existence
curl -X POST -H 'Content-Type:application/json' ${GNOCCHI_ENDPOINT}v1/resource/instance --data '{
"display_name": "myvm",
"flavor_id": "2", "host": "blah",
"id": "6868DA77-FA82-4E67-ABA9-270C5AE8CBCA",
"image_ref": "http://image",
"project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
"user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D"
}'
$*

View File

@@ -15,5 +15,6 @@ testrepository>=0.0.18
testscenarios>=0.4
testtools>=1.4.0
http://tarballs.openstack.org/aodh/aodh-master.tar.gz#egg=aodh[mysql]
http://tarballs.openstack.org/gnocchi/gnocchi-master.tar.gz#egg=gnocchi[mysql,file]
# FIXME(sileht): should be in aodh ?
keystonemiddleware