Remove mysql repositories

The sqla repositories are used for all tests and the mysql repositories are
no longer used or tested. Reduce confusion in the code by deleting the dead
mysql repositories.

Change etc/api-config.conf to use the sqla drivers, matching the devstack files.

Remove the mysql config section; only the database section should be
used now.
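
For reference, a minimal sketch of what remains after this change (the url
value is the illustrative one from the sample api-config.conf below, not a
required setting):

[database]
url = "mysql+pymysql://monapi:password@192.168.10.4/mon"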

Set the repository defaults in the config option definitions to the
correct driver paths.
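
The driver options take "module.path:ClassName" strings. A minimal sketch of
resolving such a string to a class (illustrative only; monasca-api's actual
driver loader may differ):

import importlib

def load_driver(driver_spec):
    # Split "package.module:ClassName" into its module and class parts,
    # import the module, and return the class object.
    module_name, class_name = driver_spec.split(':')
    module = importlib.import_module(module_name)
    return getattr(module, class_name)

# Hypothetical usage:
# repo_cls = load_driver(
#     'monasca_api.common.repositories.sqla.alarms_repository'
#     ':AlarmsRepository')
# alarms_repo = repo_cls()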

Change-Id: I748e91cadb059d91380b78d1e2af0ff0001fc397
Author: Craig Bryant, 2017-02-10 13:53:44 -07:00 (committed by Tomasz Trębski)
Parent: 8d501d5443
Commit: 25914a5efe
9 changed files with 24 additions and 1493 deletions


@@ -119,12 +119,6 @@ keyspace: monasca
[database]
url = "%MONASCA_API_DATABASE_URL%"
# [mysql]
# database_name = mon
# hostname = %DATABASE_HOST%
# username = %MON_API_DATABASE_USER%
# password = %DATABASE_PASSWORD%
[keystone_authtoken]
identity_uri = http://%KEYSTONE_AUTH_HOST%:%KEYSTONE_AUTH_PORT%
auth_uri = http://%KEYSTONE_SERVICE_HOST%:%KEYSTONE_SERVICE_PORT%


@@ -46,13 +46,13 @@ metrics_driver = monasca_api.common.repositories.influxdb.metrics_repository:Met
#metrics_driver = monasca_api.common.repositories.cassandra.metrics_repository:MetricsRepository
# The driver to use for the alarm definitions repository
alarm_definitions_driver = monasca_api.common.repositories.mysql.alarm_definitions_repository:AlarmDefinitionsRepository
alarm_definitions_driver = monasca_api.common.repositories.sqla.alarm_definitions_repository:AlarmDefinitionsRepository
# The driver to use for the alarms repository
alarms_driver = monasca_api.common.repositories.mysql.alarms_repository:AlarmsRepository
alarms_driver = monasca_api.common.repositories.sqla.alarms_repository:AlarmsRepository
# The driver to use for the notifications repository
notifications_driver = monasca_api.common.repositories.mysql.notifications_repository:NotificationsRepository
notifications_driver = monasca_api.common.repositories.sqla.notifications_repository:NotificationsRepository
# The driver to use for the notification method type repository
notification_method_type_driver = monasca_api.common.repositories.sqla.notification_method_type_repository:NotificationMethodTypeRepository
@@ -116,27 +116,9 @@ cluster_ip_addresses: 192.168.10.6
keyspace: monasca
# Below is configuration for database.
# The order of reading configuration for database is:
# 1) [mysql] section
# 2) [database]
# url
# 3) [database]
# host = 127.0.0.1
# username = monapi
# password = password
# drivername = mysq+pymysql
# port = 3306
# database = mon
# query = ""
[mysql]
database_name = mon
hostname = 192.168.10.4
username = monapi
password = password
# [database]
# url = "mysql+pymysql://monapi:password@127.0.0.1/mon"
# host = 127.0.0.1
[database]
url = "mysql+pymysql://monapi:password@192.168.10.4/mon"
# host = 192.168.10.4
# username = monapi
# password = password
# drivername = mysq+pymysql


@@ -1,696 +0,0 @@
# Copyright 2014,2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from monasca_common.repositories.mysql import mysql_repository
from oslo_log import log
from oslo_utils import uuidutils
from monasca_api.common.repositories import alarm_definitions_repository as adr
from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories.model import sub_alarm_definition
LOG = log.getLogger(__name__)
class AlarmDefinitionsRepository(mysql_repository.MySQLRepository,
adr.AlarmDefinitionsRepository):
base_query = """
select ad.id, ad.name, ad.description, ad.expression,
ad.match_by, ad.severity, ad.actions_enabled,
aaa.alarm_actions, aao.ok_actions, aau.undetermined_actions
from alarm_definition as ad
left join (select alarm_definition_id,
group_concat(action_id) as alarm_actions
from alarm_action
where alarm_state = 'ALARM'
group by alarm_definition_id) as aaa
on aaa.alarm_definition_id = ad.id
left join (select alarm_definition_id,
group_concat(action_id) as ok_actions
from alarm_action
where alarm_state = 'OK'
group by alarm_definition_id) as aao
on aao.alarm_definition_id = ad.id
left join (select alarm_definition_id,
group_concat(action_id) as undetermined_actions
from alarm_action
where alarm_state = 'UNDETERMINED'
group by alarm_definition_id) as aau
on aau.alarm_definition_id = ad.id
"""
def __init__(self):
super(AlarmDefinitionsRepository, self).__init__()
@mysql_repository.mysql_try_catch_block
def get_alarm_definition(self, tenant_id, id):
parms = [tenant_id, id]
where_clause = """ where ad.tenant_id = %s
and ad.id = %s
and deleted_at is NULL """
query = AlarmDefinitionsRepository.base_query + where_clause
rows = self._execute_query(query, parms)
if rows:
return rows[0]
else:
raise exceptions.DoesNotExistException
@mysql_repository.mysql_try_catch_block
def get_alarm_definitions(self, tenant_id, name, dimensions, severity,
sort_by, offset, limit):
parms = [tenant_id]
select_clause = AlarmDefinitionsRepository.base_query
where_clause = " where ad.tenant_id = %s and deleted_at is NULL "
if name:
where_clause += " and ad.name = %s "
parms.append(name.encode('utf8'))
if severity:
severities = severity.split('|')
parms.extend([s.encode('utf8') for s in severities])
where_clause += " and (" + " or ".join(["ad.severity = %s" for s in severities]) + ")"
if sort_by is not None:
order_by_clause = " order by ad." + ",ad.".join(sort_by)
if 'id' not in sort_by:
order_by_clause += ",ad.id "
else:
order_by_clause += " "
else:
order_by_clause = " order by ad.id "
limit_offset_clause = " limit %s "
parms.append(limit + 1)
if offset:
limit_offset_clause += ' offset {}'.format(offset)
if dimensions:
inner_join = """ inner join sub_alarm_definition as sad
on sad.alarm_definition_id = ad.id """
i = 0
inner_join_parms = []
for n, v in dimensions.iteritems():
inner_join += """
inner join
(select distinct sub_alarm_definition_id
from sub_alarm_definition_dimension
where dimension_name = %s and value = %s) as
sadd{}
on sadd{}.sub_alarm_definition_id = sad.id
""".format(i, i)
inner_join_parms += [n.encode('utf8'), v.encode('utf8')]
i += 1
select_clause += inner_join
parms = inner_join_parms + parms
query = select_clause + where_clause + order_by_clause + limit_offset_clause
LOG.debug("Query: {}".format(query))
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def get_sub_alarms(self, tenant_id, alarm_definition_id):
parms = [tenant_id, alarm_definition_id]
query = """select distinct sa.id as sub_alarm_id, sa.alarm_id,
sa.expression
from sub_alarm as sa
inner join alarm as a
on a.id = sa.alarm_id
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
where ad.tenant_id = %s and ad.id = %s
"""
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def get_alarm_metrics(self, tenant_id, alarm_definition_id):
parms = [tenant_id, alarm_definition_id]
query = """select distinct a.id as alarm_id, md.name,
mdg.dimensions
from alarm as a
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
inner join alarm_metric as am on am.alarm_id = a.id
inner join metric_definition_dimensions as mdd
on mdd.id = am.metric_definition_dimensions_id
inner join metric_definition as md
on md.id = mdd.metric_definition_id
left join (select dimension_set_id,
group_concat(name, '=', value) as dimensions
from metric_dimension group by dimension_set_id) as mdg
on mdg.dimension_set_id = mdd.metric_dimension_set_id
where ad.tenant_id = %s and ad.id = %s
order by a.id
"""
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def delete_alarm_definition(self, tenant_id, alarm_definition_id):
"""Soft delete the alarm definition.
Soft delete the alarm definition and hard delete any associated
alarms.
:param tenant_id:
:param alarm_definition_id:
:returns True: -- if alarm definition exists and was deleted.
:returns False: -- if the alarm definition does not exists.
:raises RepositoryException:
"""
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
cursor.execute("""update alarm_definition
set deleted_at = NOW()
where tenant_id = %s and id = %s and deleted_at is
NULL""",
[tenant_id, alarm_definition_id])
if cursor.rowcount < 1:
return False
cursor.execute(
"""delete from alarm where alarm_definition_id = %s""",
[alarm_definition_id])
return True
@mysql_repository.mysql_try_catch_block
def get_sub_alarm_definitions(self, alarm_definition_id):
parms = [alarm_definition_id]
query = """select sad.*, sadd.dimensions
from sub_alarm_definition as sad
left join (select sub_alarm_definition_id,
group_concat(dimension_name, '=', value)
as dimensions
from sub_alarm_definition_dimension
group by sub_alarm_definition_id)
as sadd
on sadd.sub_alarm_definition_id = sad.id
where sad.alarm_definition_id = %s
"""
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def create_alarm_definition(self, tenant_id, name, expression,
sub_expr_list, description, severity, match_by,
alarm_actions, undetermined_actions,
ok_actions):
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
now = datetime.datetime.utcnow()
alarm_definition_id = uuidutils.generate_uuid()
cursor.execute("""insert into alarm_definition(
id,
tenant_id,
name,
description,
expression,
severity,
match_by,
actions_enabled,
created_at,
updated_at)
values (%s, %s, %s, %s, %s, %s, %s, %s, %s,
%s)""", (
alarm_definition_id, tenant_id, name.encode('utf8'),
description.encode('utf8'), expression.encode('utf8'),
severity.upper().encode('utf8'),
",".join(match_by).encode('utf8'), 1, now, now))
for sub_expr in sub_expr_list:
sub_alarm_definition_id = uuidutils.generate_uuid()
sub_expr.id = sub_alarm_definition_id
cursor.execute("""insert into sub_alarm_definition(
id,
alarm_definition_id,
function,
metric_name,
operator,
threshold,
period,
periods,
is_deterministic,
created_at,
updated_at)
values(%s,%s,%s,%s,%s,%s,%s,%s,%s,
%s, %s)""",
(
sub_alarm_definition_id,
alarm_definition_id,
sub_expr.normalized_func.encode('utf8'),
sub_expr.normalized_metric_name.encode(
"utf8"),
sub_expr.normalized_operator.encode('utf8'),
sub_expr.threshold.encode('utf8'),
sub_expr.period.encode('utf8'),
sub_expr.periods.encode('utf8'),
sub_expr.deterministic,
now,
now))
for dimension in sub_expr.dimensions_as_list:
parsed_dimension = dimension.split('=')
cursor.execute("""insert into
sub_alarm_definition_dimension(
sub_alarm_definition_id,
dimension_name,
value)
values(%s,%s,%s)""", (
sub_alarm_definition_id,
parsed_dimension[0].encode('utf8'),
parsed_dimension[1].encode('utf8')))
self._insert_into_alarm_action(cursor, alarm_definition_id,
alarm_actions, u"ALARM")
self._insert_into_alarm_action(cursor, alarm_definition_id,
undetermined_actions,
u"UNDETERMINED")
self._insert_into_alarm_action(cursor, alarm_definition_id,
ok_actions, u"OK")
return alarm_definition_id
@mysql_repository.mysql_try_catch_block
def update_or_patch_alarm_definition(self, tenant_id, alarm_definition_id,
name, expression,
sub_expr_list, actions_enabled,
description, alarm_actions,
ok_actions, undetermined_actions,
match_by, severity, patch=False):
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
# Get the original alarm definition from the DB
parms = [tenant_id, alarm_definition_id]
where_clause = """ where ad.tenant_id = %s
and ad.id = %s
and deleted_at is NULL """
query = AlarmDefinitionsRepository.base_query + where_clause
cursor.execute(query, parms)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException
original_row = cursor.fetchall()[0]
query = """
select sad.*, sadd.dimensions
from sub_alarm_definition as sad
left join (select sub_alarm_definition_id,
group_concat(dimension_name, '=',
value) as dimensions
from sub_alarm_definition_dimension
group by sub_alarm_definition_id) as sadd
on sadd.sub_alarm_definition_id = sad.id
where sad.alarm_definition_id = %s"""
cursor.execute(query, [alarm_definition_id])
rows = cursor.fetchall()
old_sub_alarm_defs_by_id = {}
for row in rows:
sad = sub_alarm_definition.SubAlarmDefinition(row=row)
old_sub_alarm_defs_by_id[sad.id] = sad
if expression:
(
changed_sub_alarm_defs_by_id,
new_sub_alarm_defs_by_id,
old_sub_alarm_defs_by_id,
unchanged_sub_alarm_defs_by_id
) = self._determine_sub_expr_changes(
alarm_definition_id, old_sub_alarm_defs_by_id,
sub_expr_list)
if old_sub_alarm_defs_by_id or new_sub_alarm_defs_by_id:
new_count = (len(new_sub_alarm_defs_by_id) +
len(changed_sub_alarm_defs_by_id) +
len(unchanged_sub_alarm_defs_by_id))
old_count = len(old_sub_alarm_defs_by_id)
if new_count != old_count:
msg = 'number of subexpressions must not change'
else:
msg = 'metrics in subexpression must not change'
raise exceptions.InvalidUpdateException(
msg.encode('utf8'))
else:
unchanged_sub_alarm_defs_by_id = old_sub_alarm_defs_by_id
changed_sub_alarm_defs_by_id = {}
new_sub_alarm_defs_by_id = {}
old_sub_alarm_defs_by_id = {}
# Get a common update time
now = datetime.datetime.utcnow()
# Update the alarm definition
query = """
update alarm_definition
set name = %s,
description = %s,
expression = %s,
match_by = %s,
severity = %s,
actions_enabled = %s,
updated_at = %s
where tenant_id = %s and id = %s"""
if name is None:
new_name = original_row['name']
else:
new_name = name.encode('utf8')
if description is None:
if patch:
new_description = original_row['description']
else:
new_description = ''
else:
new_description = description.encode('utf8')
if expression is None:
new_expression = original_row['expression']
else:
new_expression = expression.encode('utf8')
if severity is None:
if patch:
new_severity = original_row['severity']
else:
new_severity = 'LOW'
else:
new_severity = severity.encode('utf8')
if match_by is None:
if patch:
new_match_by = original_row['match_by']
else:
new_match_by = None
else:
new_match_by = ",".join(match_by).encode('utf8')
if new_match_by != original_row['match_by']:
msg = "match_by must not change".encode('utf8')
raise exceptions.InvalidUpdateException(msg)
if actions_enabled is None:
new_actions_enabled = original_row['actions_enabled']
else:
new_actions_enabled = actions_enabled
parms = [new_name,
new_description,
new_expression,
new_match_by,
new_severity,
1 if new_actions_enabled else 0,
now,
tenant_id,
alarm_definition_id]
cursor.execute(query, parms)
# Delete the old sub alarm definitions
query = """
delete from sub_alarm_definition where id = %s"""
for sub_alarm_def_id in old_sub_alarm_defs_by_id.values():
parms = [sub_alarm_def_id]
cursor.execute(query, parms)
# Update changed sub alarm definitions
query = """
update sub_alarm_definition
set operator = %s,
threshold = %s,
is_deterministic = %s,
updated_at = %s,
where id = %s"""
for sub_alarm_definition_id, sub_alarm_def in (
changed_sub_alarm_defs_by_id.iteritems()):
parms = [sub_alarm_def.operator,
sub_alarm_def.threshold,
sub_alarm_def.deterministic,
now,
sub_alarm_definition_id]
cursor.execute(query, parms)
# Insert new sub alarm definitions
query = """
insert into sub_alarm_definition(
id,
alarm_definition_id,
function,
metric_name,
operator,
threshold,
period,
periods,
is_deterministic,
created_at,
updated_at)
values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
sub_query = """
insert into sub_alarm_definition_dimension(
sub_alarm_definition_id,
dimension_name,
value)
values(%s, %s,%s)"""
for sub_alarm_def in new_sub_alarm_defs_by_id.values():
parms = [sub_alarm_def.id,
sub_alarm_def.alarm_definition_id,
sub_alarm_def.function.encode('utf8'),
sub_alarm_def.metric_name.encode('utf8'),
sub_alarm_def.operator.encode('utf8'),
str(sub_alarm_def.threshold).encode('utf8'),
str(sub_alarm_def.period).encode('utf8'),
str(sub_alarm_def.periods).encode('utf8'),
sub_alarm_def.deterministic,
now,
now]
cursor.execute(query, parms)
for name, value in sub_alarm_def.dimensions.items():
parms = [sub_alarm_def.id, name.encode('utf8'),
value.encode('utf8')]
cursor.execute(sub_query, parms)
# Delete old alarm actions
if patch:
if alarm_actions is not None:
self._delete_alarm_actions(cursor, alarm_definition_id,
'ALARM')
if ok_actions is not None:
self._delete_alarm_actions(cursor, alarm_definition_id,
'OK')
if undetermined_actions is not None:
self._delete_alarm_actions(cursor, alarm_definition_id,
'UNDETERMINED')
else:
query = """
delete from alarm_action
where alarm_definition_id = %s"""
parms = [alarm_definition_id]
cursor.execute(query, parms)
# Insert new alarm actions
self._insert_into_alarm_action(cursor, alarm_definition_id,
alarm_actions,
u"ALARM")
self._insert_into_alarm_action(cursor, alarm_definition_id,
undetermined_actions,
u"UNDETERMINED")
self._insert_into_alarm_action(cursor, alarm_definition_id,
ok_actions,
u"OK")
# Get the updated alarm definition from the DB
parms = [tenant_id, alarm_definition_id]
where_clause = """ where ad.tenant_id = %s
and ad.id = %s
and deleted_at is NULL """
query = AlarmDefinitionsRepository.base_query + where_clause
cursor.execute(query, parms)
if cursor.rowcount < 1:
raise Exception("Failed to find current alarm definition")
updated_row = cursor.fetchall()[0]
sub_alarm_defs_dict = {'old': old_sub_alarm_defs_by_id,
'changed':
changed_sub_alarm_defs_by_id,
'new':
new_sub_alarm_defs_by_id,
'unchanged':
unchanged_sub_alarm_defs_by_id}
# Return the alarm def and the sub alarm defs
return updated_row, sub_alarm_defs_dict
def _determine_sub_expr_changes(self, alarm_definition_id,
old_sub_alarm_defs_by_id,
sub_expr_list):
old_sub_alarm_defs_set = set(
old_sub_alarm_defs_by_id.values())
new_sub_alarm_defs_set = set()
for sub_expr in sub_expr_list:
sad = sub_alarm_definition.SubAlarmDefinition(
sub_expr=sub_expr)
# Inject the alarm definition id.
sad.alarm_definition_id = alarm_definition_id.decode('utf8')
new_sub_alarm_defs_set.add(sad)
# Identify old or changed expressions
old_or_changed_sub_alarm_defs_set = (
old_sub_alarm_defs_set - new_sub_alarm_defs_set)
# Identify new or changed expressions
new_or_changed_sub_alarm_defs_set = (
new_sub_alarm_defs_set - old_sub_alarm_defs_set)
# Find changed expressions. O(n^2) == bad!
# This algo may not work if sub expressions are duplicated.
changed_sub_alarm_defs_by_id = {}
old_or_changed_sub_alarm_defs_set_to_remove = set()
new_or_changed_sub_alarm_defs_set_to_remove = set()
for old_or_changed in old_or_changed_sub_alarm_defs_set:
for new_or_changed in new_or_changed_sub_alarm_defs_set:
if old_or_changed.same_key_fields(new_or_changed):
old_or_changed_sub_alarm_defs_set_to_remove.add(
old_or_changed
)
new_or_changed_sub_alarm_defs_set_to_remove.add(
new_or_changed
)
changed_sub_alarm_defs_by_id[
old_or_changed.id] = (
new_or_changed)
old_or_changed_sub_alarm_defs_set = (
old_or_changed_sub_alarm_defs_set -
old_or_changed_sub_alarm_defs_set_to_remove
)
new_or_changed_sub_alarm_defs_set = (
new_or_changed_sub_alarm_defs_set -
new_or_changed_sub_alarm_defs_set_to_remove
)
# Create the list of unchanged expressions
unchanged_sub_alarm_defs_by_id = (
old_sub_alarm_defs_by_id.copy())
for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
del unchanged_sub_alarm_defs_by_id[old_sub_alarm_def.id]
for sub_alarm_definition_id in (
changed_sub_alarm_defs_by_id.keys()):
del unchanged_sub_alarm_defs_by_id[
sub_alarm_definition_id]
# Remove old sub expressions
temp = {}
for old_sub_alarm_def in old_or_changed_sub_alarm_defs_set:
temp[old_sub_alarm_def.id] = old_sub_alarm_def
old_sub_alarm_defs_by_id = temp
# Create IDs for new expressions
new_sub_alarm_defs_by_id = {}
for new_sub_alarm_def in new_or_changed_sub_alarm_defs_set:
sub_alarm_definition_id = uuidutils.generate_uuid()
new_sub_alarm_def.id = sub_alarm_definition_id
new_sub_alarm_defs_by_id[sub_alarm_definition_id] = (
new_sub_alarm_def)
return (changed_sub_alarm_defs_by_id,
new_sub_alarm_defs_by_id,
old_sub_alarm_defs_by_id,
unchanged_sub_alarm_defs_by_id)
def _delete_alarm_actions(self, cursor, id, alarm_action_name):
query = """
delete
from alarm_action
where alarm_definition_id = %s and alarm_state = %s
"""
parms = [id, alarm_action_name]
cursor.execute(query, parms)
def _insert_into_alarm_action(self, cursor, alarm_definition_id, actions,
alarm_state):
if actions is None:
return
for action in actions:
cursor.execute("select id from notification_method where id = %s",
(action.encode('utf8'),))
row = cursor.fetchone()
if not row:
raise exceptions.RepositoryException(
"Non-existent notification id {} submitted for {} "
"notification action".format(action.encode('utf8'),
alarm_state.encode('utf8')))
cursor.execute("""insert into alarm_action(
alarm_definition_id,
alarm_state,
action_id)
values(%s,%s,%s)""", (
alarm_definition_id, alarm_state.encode('utf8'),
action.encode('utf8')))


@@ -1,510 +0,0 @@
# (C) Copyright 2014-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from time import time
from monasca_common.repositories.mysql import mysql_repository
from oslo_log import log
from monasca_api.common.repositories import alarms_repository
from monasca_api.common.repositories import exceptions
LOG = log.getLogger(__name__)
class AlarmsRepository(mysql_repository.MySQLRepository,
alarms_repository.AlarmsRepository):
base_query = """
select distinct a.id as alarm_id, a.state,
a.state_updated_at as state_updated_timestamp,
a.updated_at as updated_timestamp,
a.created_at as created_timestamp, a.lifecycle_state, a.link,
ad.id as alarm_definition_id, ad.name as alarm_definition_name,
ad.severity,
md.name as metric_name, mdg.dimensions as metric_dimensions
from alarm as a
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
inner join alarm_metric as am on am.alarm_id = a.id
inner join metric_definition_dimensions as mdd
on mdd.id = am.metric_definition_dimensions_id
inner join metric_definition as md
on md.id = mdd.metric_definition_id
left join (select dimension_set_id, name, value,
group_concat(name, '=', value) as dimensions
from metric_dimension group by dimension_set_id) as mdg
on mdg.dimension_set_id = mdd.metric_dimension_set_id
"""
base_list_query = """
select distinct a.id as alarm_id, a.state,
a.state_updated_at as state_updated_timestamp,
a.updated_at as updated_timestamp,
a.created_at as created_timestamp, a.lifecycle_state, a.link,
ad.id as alarm_definition_id, ad.name as alarm_definition_name,
ad.severity,
md.name as metric_name, mdg.dimensions as metric_dimensions
from alarm as a
inner join ({0}) as alarm_id_list on alarm_id_list.id = a.id
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
inner join alarm_metric as am on am.alarm_id = a.id
inner join metric_definition_dimensions as mdd
on mdd.id = am.metric_definition_dimensions_id
inner join metric_definition as md
on md.id = mdd.metric_definition_id
left join (select dimension_set_id, name, value,
group_concat(name, '=', value) as dimensions
from metric_dimension group by dimension_set_id) as mdg
on mdg.dimension_set_id = mdd.metric_dimension_set_id
"""
def __init__(self):
super(AlarmsRepository, self).__init__()
@mysql_repository.mysql_try_catch_block
def get_alarm_definition(self, tenant_id, alarm_id):
query = """
select *
from alarm_definition as ad
inner join alarm as a on a.alarm_definition_id = ad.id
where ad.tenant_id = %s and a.id = %s"""
alarm_definition_rows = self._execute_query(query,
(tenant_id, alarm_id))
if not alarm_definition_rows:
raise exceptions.DoesNotExistException
# There should only be 1 row.
return alarm_definition_rows[0]
@mysql_repository.mysql_try_catch_block
def get_alarm_metrics(self, alarm_id):
parms = [alarm_id]
query = """select distinct a.id as alarm_id, md.name,
mdg.dimensions
from alarm as a
inner join alarm_metric as am on am.alarm_id = a.id
inner join metric_definition_dimensions as mdd
on mdd.id = am.metric_definition_dimensions_id
inner join metric_definition as md
on md.id = mdd.metric_definition_id
left join (select dimension_set_id,
group_concat(name, '=', value) as dimensions
from metric_dimension group by dimension_set_id) as mdg
on mdg.dimension_set_id = mdd.metric_dimension_set_id
where a.id = %s
order by a.id
"""
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def get_sub_alarms(self, tenant_id, alarm_id):
parms = [tenant_id, alarm_id]
query = """select distinct sa.id as sub_alarm_id, sa.alarm_id,
sa.expression, ad.id as alarm_definition_id
from sub_alarm as sa
inner join alarm as a
on a.id = sa.alarm_id
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
where ad.tenant_id = %s and a.id = %s
"""
return self._execute_query(query, parms)
@mysql_repository.mysql_try_catch_block
def update_alarm(self, tenant_id, id, state, lifecycle_state, link):
cnxn, cursor = self._get_cnxn_cursor_tuple()
time_ms = int(round(time() * 1000.0))
now = datetime.utcfromtimestamp(time_ms / 1000.0)
with cnxn:
select_query = """
select a.state, a.link, a.lifecycle_state
from alarm as a
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
where ad.tenant_id = %s and a.id = %s"""
cursor.execute(select_query, (tenant_id, id))
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException
prev_alarm = cursor.fetchone()
parms = [lifecycle_state, link, now]
set_str = "lifecycle_state = %s, link = %s, updated_at = %s"
if state != prev_alarm['state']:
parms.append(state)
parms.append(now)
set_str += ",state = %s, state_updated_at = %s"
parms.extend([tenant_id, id])
update_query = """
update alarm
set {}
where alarm.id in
(select distinct id
from
(select distinct alarm.id
from alarm
inner join alarm_definition
on alarm_definition.id = alarm.alarm_definition_id
where alarm_definition.tenant_id = %s and alarm.id = %s)
as tmptable
)""".format(set_str)
cursor.execute(update_query, parms)
return prev_alarm, time_ms
@mysql_repository.mysql_try_catch_block
def delete_alarm(self, tenant_id, id):
parms = [tenant_id, id]
query = """
delete alarm.*
from alarm
join
(select distinct a.id
from alarm as a
inner join alarm_definition as ad
on ad.id = a.alarm_definition_id
where ad.tenant_id = %s and a.id = %s) as b
on b.id = alarm.id
"""
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
cursor.execute(query, parms)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException
@mysql_repository.mysql_try_catch_block
def get_alarm(self, tenant_id, id):
parms = [tenant_id, id]
select_clause = AlarmsRepository.base_query
where_clause = """ where ad.tenant_id = %s
and a.id = %s """
query = select_clause + where_clause
rows = self._execute_query(query, parms)
if not rows:
raise exceptions.DoesNotExistException
else:
return rows
@mysql_repository.mysql_try_catch_block
def get_alarms(self, tenant_id, query_parms, offset, limit):
parms = [tenant_id]
select_clause = AlarmsRepository.base_list_query
sub_query = "select a.id " \
"from alarm as a " \
"join alarm_definition as ad on a.alarm_definition_id = ad.id " \
"where ad.tenant_id = %s "
if 'alarm_definition_id' in query_parms:
parms.append(query_parms['alarm_definition_id'])
sub_query += " and ad.id = %s "
if 'metric_name' in query_parms:
sub_select_clause = """
and a.id in (select distinct a.id from alarm as a
inner join alarm_metric as am on am.alarm_id
= a.id
inner join metric_definition_dimensions as mdd
on mdd.id =
am.metric_definition_dimensions_id
inner join (select distinct id from
metric_definition
where name = %s) as md
on md.id = mdd.metric_definition_id)
"""
parms.append(query_parms['metric_name'].encode('utf8'))
sub_query += sub_select_clause
if 'state' in query_parms:
parms.append(query_parms['state'].encode('utf8'))
sub_query += " and a.state = %s "
if 'severity' in query_parms:
severities = query_parms['severity'].split('|')
parms.extend([s.encode('utf8') for s in severities])
sub_query += " and (" + " or ".join(["ad.severity = %s" for s in severities]) + ")"
if 'lifecycle_state' in query_parms:
parms.append(query_parms['lifecycle_state'].encode('utf8'))
sub_query += " and a.lifecycle_state = %s"
if 'link' in query_parms:
parms.append(query_parms['link'].encode('utf8'))
sub_query += " and a.link = %s"
if 'state_updated_start_time' in query_parms:
parms.append(query_parms['state_updated_start_time']
.encode("utf8"))
sub_query += " and state_updated_at >= %s"
if 'metric_dimensions' in query_parms:
sub_select_clause = """
and a.id in (select distinct a.id from alarm as a
inner join alarm_metric as am on am.alarm_id
= a.id
inner join metric_definition_dimensions as mdd
on mdd.id =
am.metric_definition_dimensions_id
"""
sub_select_parms = []
i = 0
for metric_dimension in query_parms['metric_dimensions'].items():
if not metric_dimension[1]:
values = None
value_sql = ""
elif '|' in metric_dimension[1]:
values = metric_dimension[1].encode('utf8').split('|')
value_sql = " and ("
value_sql += " or ".join(["value = %s" for j in xrange(len(values))])
value_sql += ') '
else:
values = [metric_dimension[1]]
value_sql = " and value = %s "
sub_select_clause += """
inner join (select distinct dimension_set_id
from metric_dimension
where name = %s {}) as md{}
on md{}.dimension_set_id = mdd.metric_dimension_set_id
""".format(value_sql, i, i)
i += 1
sub_select_parms.append(metric_dimension[0].encode('utf8'))
if len(metric_dimension) > 1 and values:
sub_select_parms.extend(values)
sub_select_clause += ")"
parms += sub_select_parms
sub_query += sub_select_clause
if 'sort_by' in query_parms:
# Convert friendly names to column names
columns_mapper = {'alarm_id': 'a.id',
'alarm_definition_id': 'ad.id',
'alarm_definition_name': 'ad.name',
# check this here to avoid conflict with updated_timestamp
'state_updated_timestamp': 'a.state_updated_at',
'updated_timestamp': 'a.updated_at',
'created_timestamp': 'a.created_at',
# use custom ordering instead of alphanumeric
'severity': 'FIELD(severity, "LOW", "MEDIUM", "HIGH", "CRITICAL")',
'state': 'FIELD(state, "OK", "UNDETERMINED", "ALARM")'}
order_columns, received_cols = self._remap_columns(query_parms['sort_by'], columns_mapper)
if not received_cols.get('alarm_id', False):
order_columns.append('a.id')
order_by_clause = " order by {} ".format(','.join(order_columns))
else:
order_by_clause = " order by a.id "
if offset:
offset_clause = " offset {}".format(offset)
else:
offset_clause = ""
if limit:
limit_clause = " limit %s "
parms.append(limit + 1)
else:
limit_clause = ""
query = select_clause.format(sub_query + order_by_clause + limit_clause + offset_clause) + order_by_clause
LOG.debug("Query: {}".format(query))
return self._execute_query(query, parms)
def _remap_columns(self, columns, columns_mapper):
received_cols = {}
order_columns = []
for col in columns:
col_values = col.split()
col_name = col_values[0]
order_column = columns_mapper.get(col_name, col_name)
if len(col_values) > 1:
mode = col_values[1]
order_column = "{} {}".format(order_column, mode)
order_columns.append(order_column)
received_cols[col_name] = True
return order_columns, received_cols
@mysql_repository.mysql_try_catch_block
def get_alarms_count(self, tenant_id, query_parms, offset, limit):
select_clause = """select count(*) as count{}
from alarm as a
join alarm_definition as ad on ad.id = a.alarm_definition_id
"""
if 'group_by' in query_parms:
group_by_str = ",".join(query_parms['group_by'])
metric_group_by = {'metric_name',
'dimension_name',
'dimension_value'}.intersection(set(query_parms['group_by']))
if metric_group_by:
metric_select = """
join ( select distinct am.alarm_id{}
from metric_definition as md
join metric_definition_dimensions as mdd on md.id = mdd.metric_definition_id
join metric_dimension as mdim on mdd.metric_dimension_set_id = mdim.dimension_set_id
join alarm_metric as am on am.metric_definition_dimensions_id = mdd.id
) as metrics on a.id = metrics.alarm_id """
sub_select_clause = ""
if 'metric_name' in metric_group_by:
sub_select_clause += ', md.name as metric_name'
if 'dimension_name' in metric_group_by:
sub_select_clause += ', mdim.name as dimension_name'
if 'dimension_value' in metric_group_by:
sub_select_clause += ', mdim.value as dimension_value'
select_clause += metric_select.format(sub_select_clause)
else:
group_by_str = ""
parms = []
where_clause = " where ad.tenant_id = %s "
parms.append(tenant_id)
if 'alarm_definition_id' in query_parms:
parms.append(query_parms['alarm_definition_id'])
where_clause += " and ad.id = %s "
if 'state' in query_parms:
parms.append(query_parms['state'].encode('utf8'))
where_clause += " and a.state = %s "
if 'severity' in query_parms:
severities = query_parms['severity'].split('|')
parms.extend([s.encode('utf8') for s in severities])
where_clause += " and (" + " or ".join(["ad.severity = %s" for s in severities]) + ")"
if 'lifecycle_state' in query_parms:
parms.append(query_parms['lifecycle_state'].encode('utf8'))
where_clause += " and a.lifecycle_state = %s "
if 'link' in query_parms:
parms.append(query_parms['link'].encode('utf8'))
where_clause += " and a.link = %s "
if 'state_updated_start_time' in query_parms:
parms.append(query_parms['state_updated_start_time']
.encode("utf8"))
where_clause += " and state_updated_at >= %s "
if 'metric_name' in query_parms:
sub_select_clause = """
and a.id in (select distinct a.id from alarm as a
inner join alarm_metric as am on am.alarm_id
= a.id
inner join metric_definition_dimensions as mdd
on mdd.id =
am.metric_definition_dimensions_id
inner join (select distinct id from
metric_definition
where name = %s) as md
on md.id = mdd.metric_definition_id)
"""
parms.append(query_parms['metric_name'].encode('utf8'))
where_clause += sub_select_clause
if 'metric_dimensions' in query_parms:
sub_select_clause = """
and a.id in (select distinct a.id from alarm as a
inner join alarm_metric as am on am.alarm_id = a.id
inner join metric_definition_dimensions as mdd
on mdd.id = am.metric_definition_dimensions_id
"""
sub_select_parms = []
i = 0
for metric_dimension in query_parms['metric_dimensions']:
parsed_dimension = metric_dimension.split(':')
sub_select_clause += """
inner join (select distinct dimension_set_id
from metric_dimension
where name = %s and value = %s) as md{}
on md{}.dimension_set_id = mdd.metric_dimension_set_id
""".format(i, i)
i += 1
sub_select_parms += [parsed_dimension[0].encode('utf8'),
parsed_dimension[1].encode('utf8')]
sub_select_clause += ")"
parms += sub_select_parms
where_clause += sub_select_clause
if group_by_str:
group_order_by_clause = " group by {} order by {} ".format(group_by_str, group_by_str)
else:
group_order_by_clause = ""
if limit:
limit_clause = " limit %s "
parms.append(limit + 1)
else:
limit_clause = ""
if offset:
offset_clause = " offset {} ".format(offset)
else:
offset_clause = ""
select_group_by = ',' + group_by_str if group_by_str else ""
select_clause = select_clause.format(select_group_by)
query = select_clause + where_clause + group_order_by_clause + limit_clause + offset_clause
return self._execute_query(query, parms)


@@ -1,31 +0,0 @@
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_common.repositories.mysql import mysql_repository
from monasca_api.common.repositories import notification_method_type_repository as nr
class NotificationMethodTypeRepository(mysql_repository.MySQLRepository,
nr.NotificationMethodTypeRepository):
def __init__(self):
super(NotificationMethodTypeRepository, self).__init__()
@mysql_repository.mysql_try_catch_block
def list_notification_method_types(self):
query = "select name from notification_method_type"
rows = self._execute_query(query)
return rows


@@ -1,189 +0,0 @@
# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development Company LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from monasca_common.repositories.mysql import mysql_repository
from oslo_utils import uuidutils
from monasca_api.common.repositories import exceptions
from monasca_api.common.repositories import notifications_repository as nr
class NotificationsRepository(mysql_repository.MySQLRepository,
nr.NotificationsRepository):
def __init__(self):
super(NotificationsRepository, self).__init__()
def create_notification(self, tenant_id, name,
notification_type, address, period):
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
query = """
select *
from notification_method
where tenant_id = %s and name = %s"""
parms = [tenant_id, name.encode('utf8')]
cursor.execute(query, parms)
if cursor.rowcount > 0:
raise exceptions.AlreadyExistsException('Notification already '
'exists')
now = datetime.datetime.utcnow()
notification_id = uuidutils.generate_uuid()
query = """
insert into notification_method(
id,
tenant_id,
name,
type,
address,
period,
created_at,
updated_at
) values (%s, %s, %s, %s, %s, %s, %s, %s)"""
parms = [notification_id,
tenant_id,
name.encode('utf8'),
notification_type.encode('utf8'),
address.encode('utf8'),
period,
now,
now]
cursor.execute(query, parms)
return notification_id
@mysql_repository.mysql_try_catch_block
def list_notifications(self, tenant_id, sort_by, offset, limit):
query = """
select *
from notification_method
where tenant_id = %s"""
parms = [tenant_id]
if sort_by:
query += " order by " + ','.join(sort_by)
if 'id' not in sort_by:
query += ",id "
else:
query += " "
else:
query += " order by id "
query += " limit %s "
parms.append(limit + 1)
if offset:
query += ' offset {}'.format(offset)
rows = self._execute_query(query, parms)
return rows
@mysql_repository.mysql_try_catch_block
def delete_notification(self, tenant_id, id):
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
query = """
select *
from notification_method
where tenant_id = %s and id = %s"""
parms = [tenant_id, id]
cursor.execute(query, parms)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException
query = """
delete
from notification_method
where tenant_id = %s and id = %s"""
cursor.execute(query, parms)
@mysql_repository.mysql_try_catch_block
def list_notification(self, tenant_id, notification_id):
parms = [tenant_id, notification_id]
query = """
select *
from notification_method
where tenant_id = %s and id = %s"""
rows = self._execute_query(query, parms)
if rows:
return rows[0]
else:
raise exceptions.DoesNotExistException
@mysql_repository.mysql_try_catch_block
def find_notification_by_name(self, tenant_id, name):
cnxn, cursor = self._get_cnxn_cursor_tuple()
parms = [tenant_id, name]
with cnxn:
query = """
select *
from notification_method
where tenant_id = %s and name = %s
"""
rows = self._execute_query(query, parms)
if rows:
return rows[0]
else:
return None
@mysql_repository.mysql_try_catch_block
def update_notification(
self, id, tenant_id, name, type, address, period):
cnxn, cursor = self._get_cnxn_cursor_tuple()
with cnxn:
now = datetime.datetime.utcnow()
query = """
update notification_method
set name = %s,
type = %s,
address = %s,
period = %s,
updated_at = %s
where tenant_id = %s and id = %s"""
parms = [name.encode('utf8'), type.encode('utf8'), address.encode(
'utf8'), period, now, tenant_id, id]
cursor.execute(query, parms)
if cursor.rowcount < 1:
raise exceptions.DoesNotExistException('Not Found')


@@ -1,5 +1,6 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2016 FUJITSU LIMITED
# (C) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -34,20 +35,13 @@ class SQLRepository(object):
self.conf = cfg.CONF
url = None
if self.conf.mysql.database_name is not None:
settings_db = (self.conf.mysql.username,
self.conf.mysql.password,
self.conf.mysql.hostname,
self.conf.mysql.database_name)
url = make_url("mysql+pymysql://%s:%s@%s/%s" % settings_db)
if self.conf.database.url is not None:
url = make_url(self.conf.database.url)
else:
if self.conf.database.url is not None:
url = make_url(self.conf.database.url)
else:
database_conf = dict(self.conf.database)
if 'url' in database_conf:
del database_conf['url']
url = URL(**database_conf)
database_conf = dict(self.conf.database)
if 'url' in database_conf:
del database_conf['url']
url = URL(**database_conf)
from sqlalchemy import create_engine
self._db_engine = create_engine(url, pool_recycle=3600)
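
A minimal standalone sketch of the URL-selection logic above (assuming the
SQLAlchemy 1.x-era URL constructor that this code uses; newer SQLAlchemy
releases build URLs with URL.create() instead):

from sqlalchemy.engine.url import URL, make_url

def build_db_url(conf):
    # Prefer an explicit [database] url; otherwise assemble a URL from the
    # individual [database] options (drivername, host, username, ...).
    if conf.database.url is not None:
        return make_url(conf.database.url)
    database_conf = dict(conf.database)
    database_conf.pop('url', None)
    return URL(**database_conf)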


@@ -1,6 +1,6 @@
# Copyright 2014 IBM Corp.
# Copyright 2016 FUJITSU LIMITED
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP
# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
@@ -66,30 +66,28 @@ messaging_group = cfg.OptGroup(name='messaging', title='messaging')
cfg.CONF.register_group(messaging_group)
cfg.CONF.register_opts(messaging_opts, messaging_group)
base_sqla_path = 'monasca_api.common.repositories.sqla.'
repositories_opts = [
cfg.StrOpt('metrics_driver', default='influxdb_metrics_repo',
cfg.StrOpt('metrics_driver',
default='monasca_api.common.repositories.influxdb.metrics_repository:MetricsRepository',
help='The repository driver to use for metrics'),
cfg.StrOpt('alarm_definitions_driver',
default='mysql_alarm_definitions_repo',
default=base_sqla_path + 'alarm_definitions_repository:AlarmDefinitionsRepository',
help='The repository driver to use for alarm definitions'),
cfg.StrOpt('alarms_driver', default='mysql_alarms_repo',
cfg.StrOpt('alarms_driver',
default=base_sqla_path + 'alarms_repository:AlarmsRepository',
help='The repository driver to use for alarms'),
cfg.StrOpt('streams_driver', default='mysql_streams_repo',
help='The repository driver to use for streams'),
cfg.StrOpt('events_driver', default='mysql_events_repo',
help='The repository driver to use for events'),
cfg.StrOpt('transforms_driver', default='mysql_transforms_repo',
help='The repository driver to use for transforms'),
cfg.StrOpt('notifications_driver', default='mysql_notifications_repo',
cfg.StrOpt('notifications_driver',
default=base_sqla_path + 'notifications_repository:NotificationsRepository',
help='The repository driver to use for notifications'),
cfg.StrOpt('notification_method_type_driver', default='mysql_notifications_repo',
cfg.StrOpt('notification_method_type_driver',
default=base_sqla_path + 'notification_method_type_repository:NotificationMethodTypeRepository',
help='The repository driver to use for notifications')]
repositories_group = cfg.OptGroup(name='repositories', title='repositories')
cfg.CONF.register_group(repositories_group)
cfg.CONF.register_opts(repositories_opts, repositories_group)
kafka_opts = [cfg.StrOpt('uri', help='Address to kafka server. For example: '
'uri=192.168.1.191:9092'),
cfg.StrOpt('metrics_topic', default='metrics',
@@ -142,16 +140,6 @@ cassandra_group = cfg.OptGroup(name='cassandra', title='cassandra')
cfg.CONF.register_group(cassandra_group)
cfg.CONF.register_opts(cassandra_opts, cassandra_group)
mysql_opts = [cfg.StrOpt('database_name'),
cfg.StrOpt('hostname'),
cfg.StrOpt('username'),
cfg.StrOpt('password', secret=True)]
mysql_group = cfg.OptGroup(name='mysql', title='mysql')
cfg.CONF.register_group(mysql_group)
cfg.CONF.register_opts(mysql_opts, mysql_group)
sql_opts = [cfg.StrOpt('url', default=None),
cfg.StrOpt('host', default=None),
cfg.StrOpt('username', default=None),
@@ -162,6 +150,5 @@ sql_opts = [cfg.StrOpt('url', default=None),
cfg.StrOpt('query', default=None)]
sql_group = cfg.OptGroup(name='database', title='sql')
cfg.CONF.register_group(sql_group)
cfg.CONF.register_opts(sql_opts, sql_group)
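
A hedged sketch of how these registered options are read once oslo.config has
parsed a config file (standard oslo.config usage, not monasca-specific code;
the file name is illustrative):

from oslo_config import cfg

CONF = cfg.CONF

def show_effective_drivers():
    # Assumes the [repositories] and [database] groups registered above have
    # been set up and the config file loaded, e.g. via:
    #     CONF(['--config-file', 'api-config.conf'])
    print(CONF.repositories.alarms_driver)
    print(CONF.repositories.alarm_definitions_driver)
    print(CONF.database.url)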