Merge "Remove db api and tables for CloudWatch"

Zuul 2018-02-16 23:30:40 +00:00 committed by Gerrit Code Review
commit ab4c77bcc7
6 changed files with 59 additions and 238 deletions


@@ -1074,78 +1074,6 @@ def event_create(context, values):
    return event_ref


def watch_rule_get(context, watch_rule_id):
    result = context.session.query(models.WatchRule).get(watch_rule_id)
    return result


def watch_rule_get_by_name(context, watch_rule_name):
    result = context.session.query(
        models.WatchRule).filter_by(name=watch_rule_name).first()
    return result


def watch_rule_get_all(context):
    results = context.session.query(models.WatchRule).all()
    return results


def watch_rule_get_all_by_stack(context, stack_id):
    results = context.session.query(
        models.WatchRule).filter_by(stack_id=stack_id).all()
    return results


def watch_rule_create(context, values):
    obj_ref = models.WatchRule()
    obj_ref.update(values)
    obj_ref.save(context.session)
    return obj_ref


def watch_rule_update(context, watch_id, values):
    wr = watch_rule_get(context, watch_id)
    if not wr:
        raise exception.NotFound(_('Attempt to update a watch with id: '
                                   '%(id)s %(msg)s') % {
                                       'id': watch_id,
                                       'msg': 'that does not exist'})
    wr.update(values)
    wr.save(context.session)


def watch_rule_delete(context, watch_id):
    wr = watch_rule_get(context, watch_id)
    if not wr:
        raise exception.NotFound(_('Attempt to delete watch_rule: '
                                   '%(id)s %(msg)s') % {
                                       'id': watch_id,
                                       'msg': 'that does not exist'})
    with context.session.begin():
        for d in wr.watch_data:
            context.session.delete(d)
        context.session.delete(wr)


def watch_data_create(context, values):
    obj_ref = models.WatchData()
    obj_ref.update(values)
    obj_ref.save(context.session)
    return obj_ref


def watch_data_get_all(context):
    results = context.session.query(models.WatchData).all()
    return results


def watch_data_get_all_by_watch_rule_id(context, watch_rule_id):
    results = context.session.query(models.WatchData).filter_by(
        watch_rule_id=watch_rule_id).all()
    return results


def software_config_create(context, values):
    obj_ref = models.SoftwareConfig()
    obj_ref.update(values)


@@ -0,0 +1,53 @@
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from migrate import ForeignKeyConstraint
from sqlalchemy.engine import reflection
from sqlalchemy import MetaData
from sqlalchemy import Table


def upgrade(engine):
    meta = MetaData()
    meta.bind = engine

    def _get_columns(source_table, params):
        columns = set()
        for column in params:
            columns.add(source_table.c[column])
        return columns

    def _remove_foreign_key_constraints(engine, meta, table_name):
        inspector = reflection.Inspector.from_engine(engine)

        for fk in inspector.get_foreign_keys(table_name):
            source_table = Table(table_name, meta, autoload=True)
            target_table = Table(fk['referred_table'], meta, autoload=True)

            fkey = ForeignKeyConstraint(
                columns=_get_columns(source_table, fk['constrained_columns']),
                refcolumns=_get_columns(target_table, fk['referred_columns']),
                name=fk['name'])
            fkey.drop()

    def _drop_table_and_indexes(meta, table_name):
        table = Table(table_name, meta, autoload=True)
        for index in table.indexes:
            index.drop()
        table.drop()

    table_names = ('watch_data', 'watch_rule')

    for table_name in table_names:
        _remove_foreign_key_constraints(engine, meta, table_name)
        _drop_table_and_indexes(meta, table_name)
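
For each table, the new migration reflects it from the live database, drops its foreign key constraints, drops its indexes, and finally drops the table itself, handling watch_data before watch_rule so the child table goes first. Below is a rough, self-contained sketch of that reflect-then-drop pattern in plain SQLAlchemy against an in-memory SQLite database; the toy schema and names are stand-ins for illustration, not Heat's real models.

# A rough, standalone sketch of the reflect-then-drop pattern used above,
# run against a throwaway in-memory SQLite database. The toy schema below is
# invented for illustration; it only mimics the watch_rule/watch_data link.
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')

# Build two small tables joined by a foreign key, like watch_rule/watch_data.
meta = sa.MetaData()
sa.Table('watch_rule', meta,
         sa.Column('id', sa.Integer, primary_key=True))
sa.Table('watch_data', meta,
         sa.Column('id', sa.Integer, primary_key=True),
         sa.Column('watch_rule_id', sa.Integer,
                   sa.ForeignKey('watch_rule.id')))
meta.create_all(engine)

# Reflect each table from the live schema and drop it, child before parent,
# so nothing still references a table at the moment it is dropped. (The real
# migration additionally drops each table's foreign key constraints and
# indexes explicitly before the table itself, as shown above.)
reflected = sa.MetaData()
for name in ('watch_data', 'watch_rule'):
    table = sa.Table(name, reflected, autoload_with=engine)
    table.drop(engine)

print(sa.inspect(engine).get_table_names())  # [] -- both tables are gone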


@@ -16,7 +16,6 @@
import uuid

from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import backref

@@ -307,39 +306,6 @@ class Resource(BASE, HeatBase, StateAware):
        sqlalchemy.ForeignKey('raw_template.id'))


class WatchRule(BASE, HeatBase):
    """Represents a watch_rule created by the heat engine."""

    __tablename__ = 'watch_rule'

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column('name', sqlalchemy.String(255))
    rule = sqlalchemy.Column('rule', types.Json)
    state = sqlalchemy.Column('state', sqlalchemy.String(255))
    last_evaluated = sqlalchemy.Column(sqlalchemy.DateTime,
                                       default=timeutils.utcnow)

    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
    stack = relationship(Stack, backref=backref('watch_rule'))


class WatchData(BASE, HeatBase):
    """Represents a watch_data created by the heat engine."""

    __tablename__ = 'watch_data'

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    data = sqlalchemy.Column('data', types.Json)

    watch_rule_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('watch_rule.id'),
        nullable=False)
    watch_rule = relationship(WatchRule, backref=backref('watch_data'))


class SoftwareConfig(BASE, HeatBase):
    """Represents a software configuration resource.


@@ -124,6 +124,7 @@ class HeatMigrationsCheckers(test_migrations.WalkVersionsMixin,
        # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE
        exceptions = [
            64,  # drop constraint
            86,  # drop watch_rule/watch_data tables
        ]
        # Reviewers: DO NOT ALLOW THINGS TO BE ADDED HERE


@@ -1437,27 +1437,6 @@ def create_event(ctx, legacy_prop_data=False, **kwargs):
    return db_api.event_create(ctx, values)


def create_watch_rule(ctx, stack, **kwargs):
    values = {
        'name': 'test_rule',
        'rule': json.loads('{"foo": "123"}'),
        'state': 'normal',
        'last_evaluated': timeutils.utcnow(),
        'stack_id': stack.id,
    }
    values.update(kwargs)
    return db_api.watch_rule_create(ctx, values)


def create_watch_data(ctx, watch_rule, **kwargs):
    values = {
        'data': json.loads('{"foo": "bar"}'),
        'watch_rule_id': watch_rule.id
    }
    values.update(kwargs)
    return db_api.watch_data_create(ctx, values)


def create_service(ctx, **kwargs):
    values = {
        'id': '7079762f-c863-4954-ba61-9dccb68c57e2',
@@ -2913,117 +2892,6 @@ class DBAPIEventTest(common.HeatTestCase):
                                               self.stack2.id))


class DBAPIWatchRuleTest(common.HeatTestCase):
    def setUp(self):
        super(DBAPIWatchRuleTest, self).setUp()
        self.ctx = utils.dummy_context()
        self.template = create_raw_template(self.ctx)
        self.user_creds = create_user_creds(self.ctx)
        self.stack = create_stack(self.ctx, self.template, self.user_creds)

    def test_watch_rule_create_get(self):
        watch_rule = create_watch_rule(self.ctx, self.stack)
        ret_wr = db_api.watch_rule_get(self.ctx, watch_rule.id)
        self.assertIsNotNone(ret_wr)
        self.assertEqual('test_rule', ret_wr.name)
        self.assertEqual('{"foo": "123"}', json.dumps(ret_wr.rule))
        self.assertEqual('normal', ret_wr.state)
        self.assertEqual(self.stack.id, ret_wr.stack_id)

    def test_watch_rule_get_by_name(self):
        watch_rule = create_watch_rule(self.ctx, self.stack)
        ret_wr = db_api.watch_rule_get_by_name(self.ctx, watch_rule.name)
        self.assertIsNotNone(ret_wr)
        self.assertEqual('test_rule', ret_wr.name)

    def test_watch_rule_get_all(self):
        values = [
            {'name': 'rule1'},
            {'name': 'rule2'},
            {'name': 'rule3'},
        ]
        [create_watch_rule(self.ctx, self.stack, **val) for val in values]

        wrs = db_api.watch_rule_get_all(self.ctx)
        self.assertEqual(3, len(wrs))

        names = [wr.name for wr in wrs]
        [self.assertIn(val['name'], names) for val in values]

    def test_watch_rule_get_all_by_stack(self):
        self.stack1 = create_stack(self.ctx, self.template, self.user_creds)

        values = [
            {'name': 'rule1', 'stack_id': self.stack.id},
            {'name': 'rule2', 'stack_id': self.stack1.id},
            {'name': 'rule3', 'stack_id': self.stack1.id},
        ]
        [create_watch_rule(self.ctx, self.stack, **val) for val in values]

        wrs = db_api.watch_rule_get_all_by_stack(self.ctx, self.stack.id)
        self.assertEqual(1, len(wrs))
        wrs = db_api.watch_rule_get_all_by_stack(self.ctx, self.stack1.id)
        self.assertEqual(2, len(wrs))

    def test_watch_rule_update(self):
        watch_rule = create_watch_rule(self.ctx, self.stack)
        values = {
            'name': 'test_rule_1',
            'rule': json.loads('{"foo": "bar"}'),
            'state': 'nodata',
        }
        db_api.watch_rule_update(self.ctx, watch_rule.id, values)
        watch_rule = db_api.watch_rule_get(self.ctx, watch_rule.id)
        self.assertEqual('test_rule_1', watch_rule.name)
        self.assertEqual('{"foo": "bar"}', json.dumps(watch_rule.rule))
        self.assertEqual('nodata', watch_rule.state)

        self.assertRaises(exception.NotFound, db_api.watch_rule_update,
                          self.ctx, UUID2, values)

    def test_watch_rule_delete(self):
        watch_rule = create_watch_rule(self.ctx, self.stack)
        create_watch_data(self.ctx, watch_rule)
        db_api.watch_rule_delete(self.ctx, watch_rule.id)
        self.assertIsNone(db_api.watch_rule_get(self.ctx, watch_rule.id))
        self.assertRaises(exception.NotFound, db_api.watch_rule_delete,
                          self.ctx, UUID2)

        # Testing associated watch data deletion
        self.assertEqual([], db_api.watch_data_get_all(self.ctx))


class DBAPIWatchDataTest(common.HeatTestCase):
    def setUp(self):
        super(DBAPIWatchDataTest, self).setUp()
        self.ctx = utils.dummy_context()
        self.template = create_raw_template(self.ctx)
        self.user_creds = create_user_creds(self.ctx)
        self.stack = create_stack(self.ctx, self.template, self.user_creds)
        self.watch_rule = create_watch_rule(self.ctx, self.stack)

    def test_watch_data_create(self):
        create_watch_data(self.ctx, self.watch_rule)
        ret_data = db_api.watch_data_get_all(self.ctx)
        self.assertEqual(1, len(ret_data))
        self.assertEqual('{"foo": "bar"}', json.dumps(ret_data[0].data))
        self.assertEqual(self.watch_rule.id, ret_data[0].watch_rule_id)

    def test_watch_data_get_all(self):
        values = [
            {'data': json.loads('{"foo": "d1"}')},
            {'data': json.loads('{"foo": "d2"}')},
            {'data': json.loads('{"foo": "d3"}')}
        ]
        [create_watch_data(self.ctx, self.watch_rule, **val) for val in values]
        watch_data = db_api.watch_data_get_all(self.ctx)
        self.assertEqual(3, len(watch_data))

        data = [wd.data for wd in watch_data]
        [self.assertIn(val['data'], data) for val in values]


class DBAPIServiceTest(common.HeatTestCase):
    def setUp(self):
        super(DBAPIServiceTest, self).setUp()


@@ -0,0 +1,5 @@
---
upgrade:
  - |
    The database upgrade for the Heat Queens release drops the 'watch_rule'
    and 'watch_data' tables from the heat database.
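
Operators pick up this schema change when they run the usual Heat database upgrade (for example via heat-manage db_sync). As a quick sanity check afterwards, the schema can be inspected to confirm the two tables are gone; the snippet below is only a sketch, and the connection URL is a placeholder for your own Heat database.

# A quick post-upgrade sanity check (sketch only). The connection URL is a
# placeholder -- point it at your own Heat database before running this.
from sqlalchemy import create_engine, inspect

engine = create_engine('mysql+pymysql://heat:secret@dbhost/heat')
tables = set(inspect(engine).get_table_names())
leftover = {'watch_rule', 'watch_data'} & tables
print('leftover CloudWatch tables:', sorted(leftover) or 'none')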