Use monasca_common for metric validation
Depends-On: I91536f28ca3d58f1002eed5a7631b6c4c377b6b0
Depends-On: Ibabff76b3f1592334c281f57a1a5b939bb11e1f8
Change-Id: I40ce762a9ede88069f759819fe86aedf63476ab8
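This change drops the API's in-tree voluptuous schemas and assert-based checks in favour of the shared validators in monasca_common.validation.metrics. A minimal sketch of the new call pattern, assuming monasca-common is installed; the try/except shape mirrors the metrics POST handler change below, and the payload itself is illustrative only:

    from monasca_common.validation import metrics as metric_validation

    # Illustrative metric; field names follow the Monasca metric format.
    metric = {'name': 'cpu.idle_perc',
              'dimensions': {'hostname': 'devstack'},
              'timestamp': 1483228800000,
              'value': 97.5}

    try:
        # Accepts a single metric or a list of metrics, as in the POST handler change below.
        metric_validation.validate(metric)
    except Exception as ex:
        # The API maps validation failures to HTTP 422 (HTTPUnprocessableEntityError).
        print('rejected: {}'.format(ex))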
@@ -1,4 +1,4 @@
-# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP
+# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -297,18 +297,17 @@ class AlarmsRepository(mysql_repository.MySQLRepository,
             """
             sub_select_parms = []
             i = 0
-            for metric_dimension in query_parms['metric_dimensions']:
-                parsed_dimension = metric_dimension.split(':')
-                if len(parsed_dimension) == 1:
+            for metric_dimension in query_parms['metric_dimensions'].items():
+                if not metric_dimension[1]:
                     values = None
                     value_sql = ""
-                elif '|' in parsed_dimension[1]:
-                    values = parsed_dimension[1].encode('utf8').split('|')
+                elif '|' in metric_dimension[1]:
+                    values = metric_dimension[1].encode('utf8').split('|')
                     value_sql = " and ("
                     value_sql += " or ".join(["value = %s" for j in xrange(len(values))])
                     value_sql += ') '
                 else:
-                    values = [parsed_dimension[1]]
+                    values = [metric_dimension[1]]
                     value_sql = " and value = %s "
                 sub_select_clause += """
                     inner join (select distinct dimension_set_id
@@ -317,8 +316,8 @@ class AlarmsRepository(mysql_repository.MySQLRepository,
                     on md{}.dimension_set_id = mdd.metric_dimension_set_id
                     """.format(value_sql, i, i)
                 i += 1
-                sub_select_parms.append(parsed_dimension[0].encode('utf8'))
-                if len(parsed_dimension) > 1 and values:
+                sub_select_parms.append(metric_dimension[0].encode('utf8'))
+                if len(metric_dimension) > 1 and values:
                     sub_select_parms.extend(values)

             sub_select_clause += ")"
@@ -317,16 +317,16 @@ class AlarmsRepository(sql_repository.SQLRepository,

             sub_query_md_base = select([md.c.dimension_set_id]).select_from(md)

-            for i, metric_dimension in enumerate(query_parms['metric_dimensions']):
+            for i, metric_dimension in enumerate(query_parms['metric_dimensions'].items()):

                 md_name = "b_md_name_{}".format(i)

                 values_cond = None
                 values_cond_flag = False

-                parsed_dimension = metric_dimension.split(':')
-                if parsed_dimension and len(parsed_dimension) > 1:
-                    if '|' in parsed_dimension[1]:
-                        values = parsed_dimension[1].encode('utf8').split('|')
+                if metric_dimension and metric_dimension[1]:
+                    if '|' in metric_dimension[1]:
+                        values = metric_dimension[1].encode('utf8').split('|')
                         sub_values_cond = []
                         for j, value in enumerate(values):
                             sub_md_value = "b_md_value_{}_{}".format(i, j)
@@ -338,7 +338,7 @@ class AlarmsRepository(sql_repository.SQLRepository,
                         md_value = "b_md_value_{}".format(i)
                         values_cond = (md.c.value == bindparam(md_value))
                         values_cond_flag = True
-                        parms[md_value] = parsed_dimension[1]
+                        parms[md_value] = metric_dimension[1]

                 sub_query_md = (sub_query_md_base
                                 .where(md.c.name == bindparam(md_name)))
@@ -355,7 +355,7 @@ class AlarmsRepository(sql_repository.SQLRepository,
                                        sub_query_md.c.dimension_set_id ==
                                        mdd.c.metric_dimension_set_id))

-                parms[md_name] = parsed_dimension[0].encode('utf8')
+                parms[md_name] = metric_dimension[0].encode('utf8')

             sub_query = (sub_query
                          .select_from(sub_query_from)
@@ -521,7 +521,7 @@ class AlarmsRepository(sql_repository.SQLRepository,

             sub_query_md_base = select([md.c.dimension_set_id]).select_from(md)

-            for i, metric_dimension in enumerate(query_parms['metric_dimensions']):
+            for i, metric_dimension in enumerate(query_parms['metric_dimensions'].items()):
                 md_name = "b_md_name_{}".format(i)
                 md_value = "b_md_value_{}".format(i)

@@ -531,14 +531,13 @@ class AlarmsRepository(sql_repository.SQLRepository,
                                 .distinct()
                                 .alias('md_{}'.format(i)))

-                parsed_dimension = metric_dimension.split(':')
                 sub_query_from = (sub_query_from
                                   .join(sub_query_md,
                                         sub_query_md.c.dimension_set_id ==
                                         mdd.c.metric_dimension_set_id))

-                parms[md_name] = parsed_dimension[0].encode('utf8')
-                parms[md_value] = parsed_dimension[1].encode('utf8')
+                parms[md_name] = metric_dimension[0].encode('utf8')
+                parms[md_value] = metric_dimension[1].encode('utf8')

             sub_query = (sub_query
                          .select_from(sub_query_from)
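Both repository implementations above now receive query_parms['metric_dimensions'] as a dict of name/value pairs rather than a list of 'name:value' strings, so the loops iterate .items() instead of splitting on ':'. A small standalone illustration of the two shapes (the dimension values are taken from the updated tests that follow):

    # Old shape: colon-delimited strings that every query had to parse.
    old_dimensions = ['service:monitoring', 'hostname:roland']
    parsed = dict(d.split(':', 1) for d in old_dimensions)

    # New shape: already a dict, so the repositories simply iterate the pairs.
    new_dimensions = {'service': 'monitoring', 'hostname': 'roland'}
    assert parsed == new_dimensions

    for name, value in new_dimensions.items():
        print(name, value)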
@@ -1,5 +1,6 @@
 # Copyright 2015 Cray
 # Copyright 2016 FUJITSU LIMITED
+# Copyright 2017 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -657,7 +658,7 @@ class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
         self.assertEqual(res, expected)

         query_parms = {'metric_name': 'cpu.idle_perc',
-                       'metric_dimensions': ['flavor_id:222']}
+                       'metric_dimensions': {'flavor_id': '222'}}
         res = self.repo.get_alarms(tenant_id=tenant_id,
                                    query_parms=query_parms,
                                    limit=1000)
@@ -668,7 +669,8 @@ class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
         self.assertEqual(res, expected)

         query_parms = {'metric_name': 'cpu.idle_perc',
-                       'metric_dimensions': ['service:monitoring', 'hostname:roland']}
+                       'metric_dimensions': {'service': 'monitoring',
+                                             'hostname': 'roland'}}
         res = self.repo.get_alarms(tenant_id=tenant_id,
                                    query_parms=query_parms,
                                    limit=1000)
@@ -689,7 +691,7 @@ class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):

         alarm_def_id = self.alarm1['alarm_definition']['id']
         query_parms = {'metric_name': 'cpu.idle_perc',
-                       'metric_dimensions': ['service:monitoring'],
+                       'metric_dimensions': {'service': 'monitoring'},
                        'alarm_definition_id': alarm_def_id}
         res = self.repo.get_alarms(tenant_id=tenant_id,
                                    query_parms=query_parms,
@@ -735,7 +737,7 @@ class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):
         self.assertEqual(res, expected)

         query_parms = {'metric_name': 'cpu.idle_perc',
-                       'metric_dimensions': ['service:monitoring'],
+                       'metric_dimensions': {'service': 'monitoring'},
                        'state': 'UNDETERMINED'}
         res = self.repo.get_alarms(tenant_id=tenant_id,
                                    query_parms=query_parms,
@@ -748,7 +750,7 @@ class TestAlarmRepoDB(testtools.TestCase, fixtures.TestWithFixtures):

         time_now = datetime.datetime.now().isoformat() + 'Z'
         query_parms = {'metric_name': 'cpu.idle_perc',
-                       'metric_dimensions': ['service:monitoring'],
+                       'metric_dimensions': {'service': 'monitoring'},
                        'state': 'UNDETERMINED',
                        'state_updated_start_time': time_now}
         res = self.repo.get_alarms(tenant_id=tenant_id,
@@ -26,118 +26,6 @@ import monasca_api.v2.common.validation as validation
 import monasca_api.v2.reference.helpers as helpers


-invalid_chars = "<>={}(),\"\\|;&"
-
-
-class TestMetricNameValidation(unittest.TestCase):
-    def test_valid_name(self):
-        metric_name = "this.is_a.valid-name"
-        validation.metric_name(metric_name)
-        self.assertTrue(True)
-
-    def test_nonstring_name(self):
-        metric_name = 123456789
-        self.assertRaises(AssertionError, validation.metric_name, metric_name)
-
-    def test_long_name(self):
-        metric_name = ("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                       "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                       "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                       "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz")
-        self.assertRaises(AssertionError, validation.metric_name, metric_name)
-
-    def test_invalid_chars(self):
-        for c in invalid_chars:
-            metric_name = "this{}that".format(c)
-            self.assertRaises(AssertionError, validation.metric_name, metric_name)
-
-
-class TestDimensionValidation(unittest.TestCase):
-    def test_valid_key(self):
-        dim_key = "this.is_a.valid-key"
-        validation.dimension_key(dim_key)
-        self.assertTrue(True)
-
-    def test_nonstring_key(self):
-        dim_key = 123456
-        self.assertRaises(AssertionError, validation.dimension_key, dim_key)
-
-    def test_long_key(self):
-        dim_key = ("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                   "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                   "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                   "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz")
-        self.assertRaises(AssertionError, validation.dimension_key, dim_key)
-
-    def test_key_starts_with_underscore(self):
-        dim_key = '_key'
-        self.assertRaises(AssertionError, validation.dimension_key, dim_key)
-
-    def test_invalid_chars_key(self):
-        for c in invalid_chars:
-            dim_key = "this{}that".format(c)
-            self.assertRaises(AssertionError, validation.dimension_key, dim_key)
-
-    def test_valid_value(self):
-        dim_value = "this.is_a.valid-value"
-        validation.dimension_value(dim_value)
-        self.assertTrue(True)
-
-    def test_nonstring_value(self):
-        dim_value = None
-        self.assertRaises(AssertionError, validation.dimension_value, dim_value)
-
-    def test_long_value(self):
-        dim_value = ("abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                     "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                     "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
-                     "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz")
-        self.assertRaises(AssertionError, validation.dimension_value, dim_value)
-
-    def test_invalid_chars_value(self):
-        for c in invalid_chars:
-            dim_value = "this{}that".format(c)
-            self.assertRaises(AssertionError, validation.dimension_value, dim_value)
-
-
-class TestValueMetaValidation(unittest.TestCase):
-    def test_valid_name(self):
-        value_meta_name = "this.is_a.valid-name"
-        value_meta = {value_meta_name: 'value_meta_value'}
-        validation.validate_value_meta(value_meta)
-        self.assertTrue(True)
-
-    def test_nonstring_name(self):
-        value_meta_name = 123456
-        value_meta = {value_meta_name: 'value_meta_value'}
-        self.assertRaises(AssertionError, validation.validate_value_meta,
-                          value_meta)
-
-    def test_long_name(self):
-        value_meta_name = "x" * 256
-        value_meta = {value_meta_name: 'value_meta_value'}
-        self.assertRaises(AssertionError, validation.validate_value_meta,
-                          value_meta)
-
-    def test_valid_value(self):
-        value_meta_value = "this.is_a.valid-value"
-        value_meta = {'value_meta_name': value_meta_value}
-        validation.validate_value_meta(value_meta)
-        self.assertTrue(True)
-
-    def test_nonstring_value(self):
-        value_meta_value = 123456
-        value_meta = {'value_meta_name': value_meta_value}
-        self.assertRaises(AssertionError, validation.validate_value_meta,
-                          value_meta)
-
-    def test_long_value_meta(self):
-        value_meta_value = "x" * 2048
-        value_meta = {'value_meta_name': value_meta_value}
-        self.assertRaises(AssertionError, validation.validate_value_meta,
-                          value_meta)
-
-
 class TestStateValidation(unittest.TestCase):

     VALID_STATES = "OK", "ALARM", "UNDETERMINED"
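The hunk above removes the in-tree tests for metric-name, dimension, and value_meta validation; those checks are expected to be exercised in monasca-common itself from now on. A minimal usage sketch of the shared validators, limited to the happy-path inputs of the deleted tests and to function names that appear elsewhere in this change (the exception types raised on bad input are defined by monasca-common and are not asserted here):

    from monasca_common.validation import metrics as metric_validation

    # Mirrors the valid cases of the deleted tests.
    metric_validation.validate_name('this.is_a.valid-name')
    metric_validation.validate_dimension_key('this.is_a.valid-key')
    metric_validation.validate_dimension_value('this.is_a.valid-key', 'this.is_a.valid-value')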
@@ -1,33 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_log import log
-import voluptuous
-
-from monasca_api.v2.common.schemas import exceptions
-
-LOG = log.getLogger(__name__)
-
-dimensions_schema = voluptuous.Schema({
-    voluptuous.All(voluptuous.Any(str, unicode),
-                   voluptuous.Length(max=255)): voluptuous.All(
-        voluptuous.Any(str, unicode), voluptuous.Length(max=255))})
-
-
-def validate(dimensions):
-    try:
-        dimensions_schema(dimensions)
-    except Exception as ex:
-        LOG.debug(ex)
-        raise exceptions.ValidationException(str(ex))
@@ -1,31 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_log import log
-import voluptuous
-
-from monasca_api.v2.common.schemas import exceptions
-
-LOG = log.getLogger(__name__)
-
-metric_name_schema = voluptuous.Schema(
-    voluptuous.All(voluptuous.Any(str, unicode), voluptuous.Length(max=64)))
-
-
-def validate(name):
-    try:
-        metric_name_schema(name)
-    except Exception as ex:
-        LOG.debug(ex)
-        raise exceptions.ValidationException(str(ex))
@@ -1,40 +0,0 @@
-# Copyright 2014 Hewlett-Packard
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from oslo_log import log
-import voluptuous
-
-from monasca_api.v2.common.schemas import dimensions_schema
-from monasca_api.v2.common.schemas import exceptions
-from monasca_api.v2.common.schemas import metric_name_schema
-
-LOG = log.getLogger(__name__)
-
-metric_schema = {
-    voluptuous.Required('name'): metric_name_schema.metric_name_schema,
-    voluptuous.Optional('dimensions'): dimensions_schema.dimensions_schema,
-    voluptuous.Required('timestamp'): voluptuous.All(
-        voluptuous.Any(int, float), voluptuous.Range(min=0)),
-    voluptuous.Required('value'): voluptuous.Any(int, float)}
-
-request_body_schema = voluptuous.Schema(
-    voluptuous.Any(metric_schema, [metric_schema]))
-
-
-def validate(msg):
-    try:
-        request_body_schema(msg)
-    except Exception as ex:
-        LOG.debug(ex)
-        raise exceptions.ValidationException(str(ex))
@@ -14,47 +14,15 @@

 from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError

-import json
 import re

-invalid_chars = "<>={}(),\"\\\\|;&"
-restricted_chars = re.compile('[' + invalid_chars + ']')
-
 VALID_ALARM_STATES = ["ALARM", "OK", "UNDETERMINED"]

 VALID_ALARM_DEFINITION_SEVERITIES = ["LOW", "MEDIUM", "HIGH", "CRITICAL"]

-VALUE_META_MAX_NUMBER = 16
-
-VALUE_META_MAX_LENGTH = 2048
-
-VALUE_META_NAME_MAX_LENGTH = 255
-
 EMAIL_PATTERN = '^.+@.+$'


-def metric_name(name):
-    assert isinstance(name, (str, unicode)), "Metric name must be a string"
-    assert len(name) <= 255, "Metric name must be 255 characters or less"
-    assert len(name) >= 1, "Metric name cannot be empty"
-    assert not restricted_chars.search(name), "Invalid characters in metric name " + name
-
-
-def dimension_key(dkey):
-    assert isinstance(dkey, (str, unicode)), "Dimension key must be a string"
-    assert len(dkey) <= 255, "Dimension key must be 255 characters or less"
-    assert len(dkey) >= 1, "Dimension key cannot be empty"
-    assert dkey[0] != '_', "Dimension key cannot start with underscore (_)"
-    assert not restricted_chars.search(dkey), "Invalid characters in dimension name " + dkey
-
-
-def dimension_value(value):
-    assert isinstance(value, (str, unicode)), "Dimension value must be a string"
-    assert len(value) <= 255, "Dimension value must be 255 characters or less"
-    assert len(value) >= 1, "Dimension value cannot be empty"
-    assert not restricted_chars.search(value), "Invalid characters in dimension value " + value
-
-
 def validate_alarm_state(state):
     if state.upper() not in VALID_ALARM_STATES:
         raise HTTPUnprocessableEntityError("Invalid State",
@@ -92,29 +60,6 @@ def validate_sort_by(sort_by_list, allowed_sort_by):
                                                    sort_by_values[1]))


-def validate_value_meta(value_meta):
-    if not value_meta:
-        return
-
-    value_meta_string = json.dumps(value_meta)
-    # entries
-    assert len(value_meta) <= VALUE_META_MAX_NUMBER, "ValueMeta entries must be {} or less".format(
-        VALUE_META_MAX_NUMBER)
-    # total length
-    assert len(value_meta_string) <= VALUE_META_MAX_LENGTH, \
-        "ValueMeta name value combinations must be {} characters or less".format(
-            VALUE_META_MAX_LENGTH)
-    for name in value_meta:
-        # name
-        assert isinstance(name, (str, unicode)), "ValueMeta name must be a string"
-        assert len(name) <= VALUE_META_NAME_MAX_LENGTH, "ValueMeta name must be {} characters or less".format(
-            VALUE_META_NAME_MAX_LENGTH)
-        assert len(name) >= 1, "ValueMeta name cannot be empty"
-        # value
-        assert isinstance(value_meta[name], (str, unicode)), "ValueMeta value must be a string"
-        assert len(value_meta[name]) >= 1, "ValueMeta value cannot be empty"
-
-
 def validate_email_address(email):
     if re.match(EMAIL_PATTERN, email) is None:
         return False
@@ -16,6 +16,7 @@ import re

 import falcon
 from monasca_common.simport import simport
+from monasca_common.validation import metrics as metric_validation
 from oslo_config import cfg
 from oslo_log import log
 import pyparsing
@@ -354,7 +355,7 @@ class AlarmDefinitions(alarm_definitions_api_v2.AlarmDefinitionsV2API,
             schema_alarms.validate(alarm_definition, require_all=require_all)
             if 'match_by' in alarm_definition:
                 for name in alarm_definition['match_by']:
-                    validation.dimension_key(name)
+                    metric_validation.validate_dimension_key(name)

         except Exception as ex:
             LOG.debug(ex)
@@ -134,10 +134,8 @@ class Alarms(alarms_api_v2.AlarmsV2API,
                               'state_updated_timestamp', 'updated_timestamp', 'created_timestamp'}
             validation.validate_sort_by(query_parms['sort_by'], allowed_sort_by)

-        # ensure metric_dimensions is a list
-        if 'metric_dimensions' in query_parms and isinstance(query_parms['metric_dimensions'], str):
-            query_parms['metric_dimensions'] = query_parms['metric_dimensions'].split(',')
-        self._validate_dimensions(query_parms['metric_dimensions'])
+        query_parms['metric_dimensions'] = helpers.get_query_dimensions(req, 'metric_dimensions')
+        helpers.validate_query_dimensions(query_parms['metric_dimensions'])

         offset = helpers.get_query_param(req, 'offset')
         if offset is not None and not isinstance(offset, int):
@@ -161,23 +159,6 @@ class Alarms(alarms_api_v2.AlarmsV2API,
         res.body = helpers.dumpit_utf8(result)
         res.status = falcon.HTTP_200

-    @staticmethod
-    def _validate_dimensions(dimensions):
-        try:
-            assert isinstance(dimensions, list)
-            for dimension in dimensions:
-                name_value = dimension.split(':')
-                validation.dimension_key(name_value[0])
-                if len(name_value) > 1:
-                    if '|' in name_value[1]:
-                        values = name_value[1].split('|')
-                        for value in values:
-                            validation.dimension_value(value)
-                    else:
-                        validation.dimension_value(name_value[1])
-        except Exception as e:
-            raise HTTPUnprocessableEntityError("Unprocessable Entity", str(e))
-
     def _alarm_update(self, tenant_id, alarm_id, new_state, lifecycle_state,
                       link):

@@ -402,9 +383,8 @@ class AlarmsCount(alarms_api_v2.AlarmsCountV2API, alarming.Alarming):
                 query_parms['group_by'] = [query_parms['group_by']]
             self._validate_group_by(query_parms['group_by'])

-        # ensure metric_dimensions is a list
-        if 'metric_dimensions' in query_parms and isinstance(query_parms['metric_dimensions'], str):
-            query_parms['metric_dimensions'] = query_parms['metric_dimensions'].split(',')
+        query_parms['metric_dimensions'] = helpers.get_query_dimensions(req, 'metric_dimensions')
+        helpers.validate_query_dimensions(query_parms['metric_dimensions'])

         offset = helpers.get_query_param(req, 'offset')

@@ -496,11 +476,12 @@ class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API,
         helpers.validate_authorization(req, self._get_alarms_authorized_roles)
         start_timestamp = helpers.get_query_starttime_timestamp(req, False)
         end_timestamp = helpers.get_query_endtime_timestamp(req, False)
-        query_parms = falcon.uri.parse_query_string(req.query_string)
         offset = helpers.get_query_param(req, 'offset')
+        dimensions = helpers.get_query_dimensions(req)
+        helpers.validate_query_dimensions(dimensions)

         result = self._alarm_history_list(req.project_id, start_timestamp,
-                                          end_timestamp, query_parms,
+                                          end_timestamp, dimensions,
                                           req.uri, offset, req.limit)

         res.body = helpers.dumpit_utf8(result)
@@ -510,7 +491,7 @@ class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API,
         helpers.validate_authorization(req, self._get_alarms_authorized_roles)
         offset = helpers.get_query_param(req, 'offset')

-        result = self._alarm_history(req.project_id, [alarm_id],
+        result = self._alarm_history(req.project_id, alarm_id,
                                      req.uri, offset,
                                      req.limit)

@@ -518,17 +499,11 @@ class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API,
         res.status = falcon.HTTP_200

     def _alarm_history_list(self, tenant_id, start_timestamp,
-                            end_timestamp, query_parms, req_uri, offset,
+                            end_timestamp, dimensions, req_uri, offset,
                             limit):

         # get_alarms expects 'metric_dimensions' for dimensions key.
-        if 'dimensions' in query_parms:
-            dimensions = query_parms['dimensions']
-            if not isinstance(dimensions, list):
-                dimensions = [dimensions]
-            new_query_parms = {'metric_dimensions': dimensions}
-        else:
-            new_query_parms = {}
+        new_query_parms = {'metric_dimensions': dimensions}

         alarm_rows = self._alarms_repo.get_alarms(tenant_id, new_query_parms,
                                                   None, None)
@@ -543,7 +518,7 @@ class AlarmsStateHistory(alarms_api_v2.AlarmsStateHistoryV2API,

     def _alarm_history(self, tenant_id, alarm_id, req_uri, offset, limit):

-        result = self._metrics_repo.alarm_history(tenant_id, alarm_id, offset,
+        result = self._metrics_repo.alarm_history(tenant_id, [alarm_id], offset,
                                                   limit)

         return helpers.paginate(result, req_uri, limit)
@@ -1,5 +1,5 @@
 # Copyright 2015 Cray Inc. All Rights Reserved.
-# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
+# (C) Copyright 2014,2016-2017 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -17,6 +17,7 @@ import datetime
 import json

 import falcon
+from monasca_common.validation import metrics as metric_validation
 from oslo_log import log
 from oslo_utils import timeutils
 import simplejson
@@ -24,9 +25,6 @@ import six
 import six.moves.urllib.parse as urlparse

 from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError
-from monasca_api.v2.common.schemas import dimensions_schema
-from monasca_api.v2.common.schemas import exceptions as schemas_exceptions
-from monasca_api.v2.common.schemas import metric_name_schema

 LOG = log.getLogger(__name__)

@@ -141,37 +139,38 @@ def get_query_name(req, name_required=False):
         raise HTTPUnprocessableEntityError('Unprocessable Entity', ex.message)


-def get_query_dimensions(req):
+def get_query_dimensions(req, param_key='dimensions'):
     """Gets and parses the query param dimensions.

     :param req: HTTP request object.
+    :param dimensions_param: param name for dimensions, default='dimensions'
     :return: Returns the dimensions as a JSON object
     :raises falcon.HTTPBadRequest: If dimensions are malformed.
     """
     try:
         params = falcon.uri.parse_query_string(req.query_string)
         dimensions = {}
-        if 'dimensions' in params:
-            dimensions_param = params['dimensions']
-
-            if isinstance(dimensions_param, basestring):
-                dimensions_str_array = dimensions_param.split(',')
-            elif isinstance(dimensions_param, list):
-                dimensions_str_array = []
-                for sublist in dimensions_param:
-                    dimensions_str_array.extend(sublist.split(","))
-            else:
-                raise Exception("Error parsing dimensions, unknown format")
-
-            for dimension in dimensions_str_array:
-                dimension_name_value = dimension.split(':')
-                if len(dimension_name_value) == 2:
-                    dimensions[dimension_name_value[0]] = dimension_name_value[
-                        1]
-                elif len(dimension_name_value) == 1:
-                    dimensions[dimension_name_value[0]] = ""
-                else:
-                    raise Exception('Dimensions are malformed')
+        if param_key not in params:
+            return dimensions
+
+        dimensions_param = params[param_key]
+        if isinstance(dimensions_param, basestring):
+            dimensions_str_array = dimensions_param.split(',')
+        elif isinstance(dimensions_param, list):
+            dimensions_str_array = []
+            for sublist in dimensions_param:
+                dimensions_str_array.extend(sublist.split(","))
+        else:
+            raise Exception("Error parsing dimensions, unknown format")
+
+        for dimension in dimensions_str_array:
+            dimension_name_value = dimension.split(':')
+            if len(dimension_name_value) == 2:
+                dimensions[dimension_name_value[0]] = dimension_name_value[1]
+            elif len(dimension_name_value) == 1:
+                dimensions[dimension_name_value[0]] = ""
+            else:
+                raise Exception('Dimensions are malformed')
         return dimensions
     except Exception as ex:
         LOG.debug(ex)
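get_query_dimensions (above) can now read the pairs from an arbitrary query parameter and always returns a dict, so a request such as GET /v2.0/alarms?metric_dimensions=service:monitoring,hostname:roland yields the dict shape the repositories expect. A standalone restatement of the parsing rule from the hunk above, for illustration only (the helper itself reports failures through HTTPUnprocessableEntityError rather than ValueError):

    def parse_dimensions(dimensions_param):
        # 'name:value' pairs separated by commas; a bare 'name' maps to "".
        dimensions = {}
        for dimension in dimensions_param.split(','):
            name_value = dimension.split(':')
            if len(name_value) == 2:
                dimensions[name_value[0]] = name_value[1]
            elif len(name_value) == 1:
                dimensions[name_value[0]] = ""
            else:
                raise ValueError('Dimensions are malformed')
        return dimensions

    print(parse_dimensions('service:monitoring,hostname:roland'))
    # {'service': 'monitoring', 'hostname': 'roland'}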
@@ -284,9 +283,11 @@ def validate_query_name(name):
     :param name: Query param name.
     :raises falcon.HTTPBadRequest: If name is not valid.
     """
+    if not name:
+        return
     try:
-        metric_name_schema.validate(name)
-    except schemas_exceptions.ValidationException as ex:
+        metric_validation.validate_name(name)
+    except Exception as ex:
         LOG.debug(ex)
         raise HTTPUnprocessableEntityError('Unprocessable Entity', ex.message)

@@ -298,8 +299,19 @@ def validate_query_dimensions(dimensions):
     :raises falcon.HTTPBadRequest: If dimensions are not valid.
     """
     try:
-        dimensions_schema.validate(dimensions)
-    except schemas_exceptions.ValidationException as ex:
+        for key, value in dimensions.items():
+            if key.startswith('_'):
+                raise Exception("Dimension key {} may not start with '_'".format(key))
+            metric_validation.validate_dimension_key(key)
+            if value:
+                if '|' in value:
+                    values = value.split('|')
+                    for v in values:
+                        metric_validation.validate_dimension_value(key, v)
+                else:
+                    metric_validation.validate_dimension_value(key, value)
+    except Exception as ex:
         LOG.debug(ex)
         raise HTTPUnprocessableEntityError('Unprocessable Entity', ex.message)

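validate_query_dimensions (above) now walks each key/value pair itself instead of delegating to the deleted dimensions_schema, and it accepts the 'value1|value2' multi-value form by validating each alternative separately. A sketch of the accepted input shape, using the same validator calls as the hunk (requires monasca-common; the dict literal is illustrative):

    from monasca_common.validation import metrics as metric_validation

    dimensions = {'service': 'monitoring', 'hostname': 'roland|devstack'}
    for key, value in dimensions.items():
        metric_validation.validate_dimension_key(key)
        for v in value.split('|'):
            metric_validation.validate_dimension_value(key, v)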
@@ -14,6 +14,7 @@

 import falcon
 from monasca_common.simport import simport
+from monasca_common.validation import metrics as metric_validation
 from oslo_config import cfg
 from oslo_log import log

@@ -23,7 +24,6 @@ from monasca_api.common.messaging import (
 from monasca_api.common.messaging.message_formats import (
     metrics as metrics_message)
 from monasca_api.v2.common.exceptions import HTTPUnprocessableEntityError
-from monasca_api.v2.common import validation
 from monasca_api.v2.reference import helpers
 from monasca_api.v2.reference import resource

@@ -70,29 +70,6 @@ class Metrics(metrics_api_v2.MetricsV2API):
             raise falcon.HTTPInternalServerError('Service unavailable',
                                                  ex.message)

-    def _validate_metrics(self, metrics):
-
-        try:
-            if isinstance(metrics, list):
-                for metric in metrics:
-                    self._validate_single_metric(metric)
-            else:
-                self._validate_single_metric(metrics)
-        except Exception as ex:
-            LOG.exception(ex)
-            raise HTTPUnprocessableEntityError('Unprocessable Entity', ex.message)
-
-    def _validate_single_metric(self, metric):
-        validation.metric_name(metric['name'])
-        assert isinstance(metric['timestamp'], (int, float)), "Timestamp must be a number"
-        assert isinstance(metric['value'], (int, long, float)), "Value must be a number"
-        if "dimensions" in metric:
-            for dimension_key in metric['dimensions']:
-                validation.dimension_key(dimension_key)
-                validation.dimension_value(metric['dimensions'][dimension_key])
-        if "value_meta" in metric:
-            validation.validate_value_meta(metric['value_meta'])
-
     def _send_metrics(self, metrics):
         try:
             self._message_queue.send_message(metrics)
@@ -120,7 +97,12 @@ class Metrics(metrics_api_v2.MetricsV2API):
         helpers.validate_authorization(req,
                                        self._post_metrics_authorized_roles)
         metrics = helpers.read_http_resource(req)
-        self._validate_metrics(metrics)
+        try:
+            metric_validation.validate(metrics)
+        except Exception as ex:
+            LOG.exception(ex)
+            raise HTTPUnprocessableEntityError("Unprocessable Entity", ex.message)

         tenant_id = (
             helpers.get_x_tenant_or_tenant_id(req,
                                               self._delegate_authorized_roles))