New Event uuid column and auto-increment id

Re-added the auto-increment id column to the Event table, and added a
new column to store the event uuid. The uuid is exposed to the user as
the id of the event, while internally the event is handled by the
value of the integer id column.
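
For reference, a minimal standalone sketch of the resulting model (an
illustration only, assuming a plain declarative Base; the real Event
model keeps its other columns, relationships and the Heat base classes
shown in the diff below):

    import uuid

    import sqlalchemy
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Event(Base):
        __tablename__ = 'event'

        # Internal auto-increment key, used only inside the engine/DB layer.
        id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
        # Externally visible identifier, returned to the user as the event id.
        uuid = sqlalchemy.Column(sqlalchemy.String(36),
                                 default=lambda: str(uuid.uuid4()),
                                 unique=True)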

Change-Id: I43464c6079bf7631fda0c4ab112b8f874c6190a3
Co-Authored-By: chmouel@enovance.com
Closes-Bug: #1261883
Pablo Andres Fuente 2014-01-27 13:49:14 -03:00
parent 2361051705
commit ee5b3363db
8 changed files with 280 additions and 16 deletions

@@ -84,7 +84,6 @@ class EventController(object):
filter_func=lambda e: True, detail=False):
events = self.engine.list_events(req.context,
identity)
keys = None if detail else summary_keys
return [format_event(req, e, keys) for e in events if filter_func(e)]

@@ -0,0 +1,195 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import itertools
import uuid

import migrate.changeset.constraint as constraint
import sqlalchemy

from heat.openstack.common import timeutils


def upgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
upgrade_sqlite(migrate_engine)
return
meta = sqlalchemy.MetaData(bind=migrate_engine)
event_table = sqlalchemy.Table('event', meta, autoload=True)
    event_uuid = sqlalchemy.Column('uuid', sqlalchemy.String(length=36),
                                   default=lambda: str(uuid.uuid4()))
event_table.create_column(event_uuid)
if migrate_engine.name == 'postgresql':
sequence = sqlalchemy.Sequence('evt')
sqlalchemy.schema.CreateSequence(sequence,
bind=migrate_engine).execute()
event_id = sqlalchemy.Column('tmp_id', sqlalchemy.Integer,
server_default=sqlalchemy.text(
"nextval('evt')"))
else:
event_id = sqlalchemy.Column('tmp_id', sqlalchemy.Integer)
event_table.create_column(event_id)
fake_autoincrement = itertools.count(1)
event_list = event_table.select().order_by(
sqlalchemy.sql.expression.asc(event_table.c.created_at)).execute()
for event in event_list:
values = {'tmp_id': fake_autoincrement.next(), 'uuid': event.id}
update = event_table.update().where(
event_table.c.id == event.id).values(values)
migrate_engine.execute(update)
cons = constraint.UniqueConstraint('uuid', table=event_table)
cons.create()
event_table.c.id.drop()
event_table.c.tmp_id.alter('id', sqlalchemy.Integer)
cons = constraint.PrimaryKeyConstraint('tmp_id', table=event_table)
cons.create()
event_table.c.tmp_id.alter(sqlalchemy.Integer, autoincrement=True)


def upgrade_sqlite(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
    # (pafuent) The table is defined here instead of being imported from the
    # models, because future migrations could change the model and break this
    # migration. It is ugly, but it is the only way I found to 'freeze' the
    # model state for this migration.
stack_table = sqlalchemy.Table('stack', meta, autoload=True)
event_table = sqlalchemy.Table(
'new_event', meta,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('stack_id', sqlalchemy.String(36),
sqlalchemy.ForeignKey(stack_table.c.id),
nullable=False),
sqlalchemy.Column('uuid', sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True),
sqlalchemy.Column('resource_action', sqlalchemy.String(255)),
sqlalchemy.Column('resource_status', sqlalchemy.String(255)),
sqlalchemy.Column('resource_name', sqlalchemy.String(255)),
sqlalchemy.Column('physical_resource_id', sqlalchemy.String(255)),
sqlalchemy.Column('resource_status_reason', sqlalchemy.String(255)),
sqlalchemy.Column('resource_type', sqlalchemy.String(255)),
sqlalchemy.Column('resource_properties', sqlalchemy.PickleType),
sqlalchemy.Column('created_at', sqlalchemy.DateTime,
default=timeutils.utcnow),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime,
onupdate=timeutils.utcnow))
event_table.create()
prev_event_table = sqlalchemy.Table('event', meta, autoload=True)
event_list = list(prev_event_table.select().order_by(
sqlalchemy.sql.expression.asc(prev_event_table.c.created_at))
.execute())
for event in event_list:
values = {
'stack_id': event.stack_id,
'uuid': event.id,
'resource_action': event.resource_action,
'resource_status': event.resource_status,
'resource_name': event.resource_name,
'physical_resource_id': event.physical_resource_id,
'resource_status_reason': event.resource_status_reason,
'resource_type': event.resource_type,
'resource_properties': event.resource_properties}
migrate_engine.execute(event_table.insert(values))
prev_event_table.drop()
event_table.rename('event')


def downgrade(migrate_engine):
if migrate_engine.name == 'sqlite':
downgrade_sqlite(migrate_engine)
return
meta = sqlalchemy.MetaData(bind=migrate_engine)
event_table = sqlalchemy.Table('event', meta, autoload=True)
    event_id = sqlalchemy.Column('tmp_id', sqlalchemy.String(length=36),
                                 default=lambda: str(uuid.uuid4()))
event_id.create(event_table)
event_list = event_table.select().execute()
for event in event_list:
values = {'tmp_id': event.uuid}
update = event_table.update().where(
event_table.c.uuid == event.uuid).values(values)
migrate_engine.execute(update)
event_table.c.id.drop()
event_table.c.uuid.drop()
cons = constraint.PrimaryKeyConstraint('tmp_id', table=event_table)
cons.create()
    event_table.c.tmp_id.alter('id', default=lambda: str(uuid.uuid4()))


def downgrade_sqlite(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
    # (pafuent) The table is defined here instead of being imported from the
    # models, because future migrations could change the model and break this
    # migration. It is ugly, but it is the only way I found to 'freeze' the
    # model state for this migration.
stack_table = sqlalchemy.Table('stack', meta, autoload=True)
event_table = sqlalchemy.Table(
'new_event', meta,
sqlalchemy.Column('id', sqlalchemy.String(36),
default=lambda: str(uuid.uuid4())),
sqlalchemy.Column('stack_id', sqlalchemy.String(36),
sqlalchemy.ForeignKey(stack_table.c.id),
nullable=False),
sqlalchemy.Column('resource_action', sqlalchemy.String(255)),
sqlalchemy.Column('resource_status', sqlalchemy.String(255)),
sqlalchemy.Column('resource_name', sqlalchemy.String(255)),
sqlalchemy.Column('physical_resource_id', sqlalchemy.String(255)),
sqlalchemy.Column('resource_status_reason', sqlalchemy.String(255)),
sqlalchemy.Column('resource_type', sqlalchemy.String(255)),
sqlalchemy.Column('resource_properties', sqlalchemy.PickleType),
sqlalchemy.Column('created_at', sqlalchemy.DateTime,
default=timeutils.utcnow),
sqlalchemy.Column('updated_at', sqlalchemy.DateTime,
onupdate=timeutils.utcnow))
event_table.create()
prev_event_table = sqlalchemy.Table('event', meta, autoload=True)
event_list = prev_event_table.select().execute()
for event in event_list:
values = {
'id': event.uuid,
'stack_id': event.stack_id,
'resource_action': event.resource_action,
'resource_status': event.resource_status,
'resource_name': event.resource_name,
'physical_resource_id': event.physical_resource_id,
'resource_status_reason': event.resource_status_reason,
'resource_type': event.resource_type,
'resource_properties': event.resource_properties}
migrate_engine.execute(event_table.insert(values))
prev_event_table.drop()
event_table.rename('event')

@@ -154,13 +154,15 @@ class Event(BASE, HeatBase):
__tablename__ = 'event'
id = sqlalchemy.Column(sqlalchemy.String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
stack_id = sqlalchemy.Column(sqlalchemy.String(36),
sqlalchemy.ForeignKey('stack.id'),
nullable=False)
stack = relationship(Stack, backref=backref('events'))
uuid = sqlalchemy.Column(sqlalchemy.String(36),
default=lambda: str(uuid.uuid4()),
unique=True)
resource_action = sqlalchemy.Column(sqlalchemy.String(255))
resource_status = sqlalchemy.Column(sqlalchemy.String(255))
resource_name = sqlalchemy.Column(sqlalchemy.String(255))

@@ -27,7 +27,7 @@ class Event(object):
def __init__(self, context, stack, action, status, reason,
physical_resource_id, resource_properties, resource_name,
resource_type, timestamp=None, id=None):
resource_type, uuid=None, timestamp=None, id=None):
'''
Initialise from a context, stack, and event information. The timestamp
and database ID may also be initialised if the event is already in the
@@ -45,6 +45,7 @@
self.resource_properties = dict(resource_properties)
except ValueError as ex:
self.resource_properties = {'Error': str(ex)}
self.uuid = uuid
self.timestamp = timestamp
self.id = id
@@ -65,7 +66,7 @@
return cls(context, st, ev.resource_action, ev.resource_status,
ev.resource_status_reason, ev.physical_resource_id,
ev.resource_properties, ev.resource_name,
ev.resource_type, ev.created_at, ev.id)
ev.resource_type, ev.uuid, ev.created_at, ev.id)
def store(self):
'''Store the Event in the database.'''
@@ -80,6 +81,9 @@
'resource_properties': self.resource_properties,
}
if self.uuid is not None:
ev['uuid'] = self.uuid
if self.timestamp is not None:
ev['created_at'] = self.timestamp
@@ -92,10 +96,10 @@
def identifier(self):
'''Return a unique identifier for the event.'''
if self.id is None:
if self.uuid is None:
return None
res_id = identifier.ResourceIdentifier(
resource_name=self.resource_name, **self.stack.identifier())
return identifier.EventIdentifier(event_id=str(self.id), **res_id)
return identifier.EventIdentifier(event_id=str(self.uuid), **res_id)

@@ -76,7 +76,7 @@ EVENT_KEYS = (
) = (
'event_identity',
STACK_ID, STACK_NAME,
"event_time",
'event_time',
RES_NAME, RES_PHYSICAL_ID, RES_ACTION,
RES_STATUS, RES_STATUS_DATA, RES_TYPE,
'resource_properties',

@@ -26,6 +26,7 @@ import sqlalchemy
import subprocess
import tempfile
import uuid
import datetime
from migrate.versioning import repository
@@ -175,3 +176,58 @@ class TestHeatMigrations(test_migrations.BaseMigrationTestCase,
def _check_034(self, engine, data):
self.assertColumnExists(engine, 'raw_template', 'files')
def _pre_upgrade_035(self, engine):
        # The stack ids belong to the stacks created by the 033 migration test data.
event_table = get_table(engine, 'event')
data = [{
'id': '22222222-152e-405d-b13a-35d4c816390c',
'stack_id': '967aaefb-152e-405d-b13a-35d4c816390c',
'resource_action': 'Test',
'resource_status': 'TEST IN PROGRESS',
'resource_name': 'Testing Resource',
'physical_resource_id': '3465d1ec-8b21-46cd-9dgf-f66cttrh53f9',
'resource_status_reason': '',
'resource_type': '',
'resource_properties': None,
'created_at': datetime.datetime.now()},
{'id': '11111111-152e-405d-b13a-35d4c816390c',
'stack_id': '967aaefb-152e-405d-b13a-35d4c816390c',
'resource_action': 'Test',
'resource_status': 'TEST COMPLETE',
'resource_name': 'Testing Resource',
'physical_resource_id': '3465d1ec-8b21-46cd-9dgf-f66cttrh53f9',
'resource_status_reason': '',
'resource_type': '',
'resource_properties': None,
'created_at': datetime.datetime.now() +
datetime.timedelta(days=5)}]
engine.execute(event_table.insert(), data)
return data
def _check_035(self, engine, data):
self.assertColumnExists(engine, 'event', 'id')
self.assertColumnExists(engine, 'event', 'uuid')
event_table = get_table(engine, 'event')
events_in_db = list(event_table.select().execute())
last_id = 0
for index, event in enumerate(data):
last_id = index + 1
self.assertEqual(last_id, events_in_db[index].id)
self.assertEqual(event['id'], events_in_db[index].uuid)
        # Check that the auto-increment id keeps working for newly inserted rows.
data = [{
'uuid': '33333333-152e-405d-b13a-35d4c816390c',
'stack_id': '967aaefb-152e-405d-b13a-35d4c816390c',
'resource_action': 'Test',
            'resource_status': 'TEST COMPLETE AGAIN',
'resource_name': 'Testing Resource',
'physical_resource_id': '3465d1ec-8b21-46cd-9dgf-f66cttrh53f9',
'resource_status_reason': '',
'resource_type': '',
'resource_properties': None,
'created_at': datetime.datetime.now()}]
result = engine.execute(event_table.insert(), data)
self.assertEqual(last_id + 1, result.inserted_primary_key[0])

@@ -118,6 +118,7 @@ class FormatTest(HeatTestCase):
return Event(utils.dummy_context(), self.stack, 'CREATE', 'COMPLETE',
'state changed', 'z3455xyc-9f88-404d-a85b-5315293e67de',
resource.properties, resource.name, resource.type(),
uuid='abc123yc-9f88-404d-a85b-531529456xyz',
id=event_id)
def test_format_stack_resource(self):
@@ -152,11 +153,8 @@
self.assertEqual(['generic2'], res1['required_by'])
self.assertEqual([], res2['required_by'])
def test_format_event_id_integer(self):
self._test_format_event('42')
def test_format_event_id_uuid(self):
self._test_format_event('a3455d8c-9f88-404d-a85b-5315293e67de')
def test_format_event_identifier_uuid(self):
self._test_format_event('abc123yc-9f88-404d-a85b-531529456xyz')
def _test_format_event(self, event_id):
event = self._dummy_event(event_id)

@@ -124,19 +124,29 @@ class EventTest(HeatTestCase):
self.assertEqual('arizona', events[0].physical_resource_id)
def test_identifier(self):
event_uuid = 'abc123yc-9f88-404d-a85b-531529456xyz'
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'wibble', self.resource.properties,
self.resource.name, self.resource.type())
self.resource.name, self.resource.type(),
uuid=event_uuid)
eid = e.store()
e.store()
expected_identifier = {
'stack_name': self.stack.name,
'stack_id': self.stack.id,
'tenant': self.ctx.tenant_id,
'path': '/resources/EventTestResource/events/%s' % str(eid)
'path': '/resources/EventTestResource/events/%s' % str(event_uuid)
}
self.assertEqual(expected_identifier, e.identifier())
def test_identifier_is_none(self):
e = event.Event(self.ctx, self.stack, 'TEST', 'IN_PROGRESS', 'Testing',
'wibble', self.resource.properties,
self.resource.name, self.resource.type())
e.store()
self.assertIsNone(e.identifier())
def test_badprop(self):
tmpl = {'Type': 'ResourceWithRequiredProps',
'Properties': {'Foo': False}}