Merge "Add prefix to user message event ids"

Jenkins authored 2017-01-18 16:31:23 +00:00; committed by Gerrit Code Review
commit 84625957b4
16 changed files with 299 additions and 44 deletions

@@ -207,7 +207,8 @@ class HostCommands(object):
class DbCommands(object):
"""Class for managing the database."""
online_migrations = (db.migrate_consistencygroups_to_groups,)
online_migrations = (db.migrate_consistencygroups_to_groups,
db.migrate_add_message_prefix)
def __init__(self):
pass
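
The tuple above is what the online data migration runner in cinder-manage walks; each registered callable is invoked with a request context and a batch size, and reports how much work it found and how much it completed. The loop below is only an illustrative Python sketch of that calling convention, not the actual cinder-manage implementation.

# Illustrative sketch (assumed calling convention, not cinder-manage itself):
# each migration is called as migration(context, max_count) and returns a
# (found, done) pair of counters.
def run_online_migrations(context, migrations, batch_size=50):
    for migration in migrations:
        while True:
            found, done = migration(context, batch_size)
            if found == 0:
                break  # nothing left for this migration
            if done == 0:
                break  # work was found but nothing migrated; stop rather than spin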

@@ -1333,6 +1333,19 @@ def consistencygroup_include_in_cluster(context, cluster, partial_rename=True,
**filters)
def migrate_add_message_prefix(context, max_count, force=False):
"""Change Message event ids to start with the VOLUME_ prefix.
:param max_count: The maximum number of messages to consider in
this run.
:param force: Ignored in this migration
:returns: number of messages needing migration, number of
messages migrated (both will always be less than
max_count).
"""
return IMPL.migrate_add_message_prefix(context, max_count, force)
###################
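
As a quick, hedged illustration of that return contract (ctxt below is an assumed admin context, not a name introduced by this change):

# One batched call: count_all is how many messages without the prefix were
# considered (capped at max_count), count_hit how many were rewritten.
count_all, count_hit = db.migrate_add_message_prefix(ctxt, 50)
print('%d found without the prefix, %d updated' % (count_all, count_hit))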

@@ -5405,6 +5405,24 @@ def consistencygroup_include_in_cluster(context, cluster,
partial_rename, filters)
@require_admin_context
def migrate_add_message_prefix(context, max_count, force=False):
prefix = "VOLUME_"
session = get_session()
with session.begin():
messages = (model_query(context, models.Message.id, session=session).
filter(~models.Message.event_id.like(prefix + '%')).
limit(max_count))
count_all = messages.count()
count_hit = (model_query(context, models.Message, session=session).
filter(models.Message.id.in_(messages.as_scalar())).
update({'event_id': prefix + models.Message.event_id},
synchronize_session=False))
return count_all, count_hit
###############################
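
The implementation above builds a LIMITed query of up to max_count message ids that still lack the prefix, then issues one bulk UPDATE keyed on that id list; synchronize_session=False is safe because no loaded ORM objects need refreshing. The following self-contained sketch reproduces the same pattern against a throwaway SQLite database in SQLAlchemy 1.x style; the toy Message model and sample rows are assumptions for illustration, not Cinder's real models.

# Self-contained sketch of the subquery + bulk-update pattern (toy model).
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Message(Base):
    __tablename__ = 'messages'
    id = Column(Integer, primary_key=True)
    event_id = Column(String(255))


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Message(event_id='000001'),
                 Message(event_id='VOLUME_000002')])
session.commit()

prefix = 'VOLUME_'
# Up to 50 ids whose event_id does not yet start with the prefix.
ids = (session.query(Message.id)
       .filter(~Message.event_id.like(prefix + '%'))
       .limit(50))
count_all = ids.count()
# Single bulk UPDATE; the string concatenation happens in SQL.
count_hit = (session.query(Message)
             .filter(Message.id.in_(ids.as_scalar()))
             .update({'event_id': prefix + Message.event_id},
                     synchronize_session=False))
session.commit()
print(count_all, count_hit)  # 1 1 for the sample rows above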

@@ -0,0 +1,21 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import MetaData, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
messages = Table('messages', meta, autoload=True)
messages.c.request_id.alter(nullable=True)

@@ -21,24 +21,32 @@ specific.
from cinder.i18n import _
UNKNOWN_ERROR = '000001'
UNABLE_TO_ALLOCATE = '000002'
ATTACH_READONLY_VOLUME = '000003'
IMAGE_FROM_VOLUME_OVER_QUOTA = '000004'
class EventIds(object):
UNKNOWN_ERROR = 'VOLUME_000001'
UNABLE_TO_ALLOCATE = 'VOLUME_000002'
ATTACH_READONLY_VOLUME = 'VOLUME_000003'
IMAGE_FROM_VOLUME_OVER_QUOTA = 'VOLUME_000004'
event_id_message_map = {
UNKNOWN_ERROR: _("An unknown error occurred."),
UNABLE_TO_ALLOCATE: _("No storage could be allocated for this volume "
"request. You may be able to try another size or"
" volume type."),
ATTACH_READONLY_VOLUME: _("A readonly volume must be attached as "
"readonly."),
IMAGE_FROM_VOLUME_OVER_QUOTA: _("Failed to copy volume to image as image "
"quota has been met. Please delete images"
" or have your limit increased, then try "
"again."),
EventIds.UNKNOWN_ERROR: _("An unknown error occurred."),
EventIds.UNABLE_TO_ALLOCATE: _(
"No storage could be allocated for this volume "
"request. You may be able to try another size or"
" volume type."),
EventIds.ATTACH_READONLY_VOLUME: _(
"A readonly volume must be attached as readonly."),
EventIds.IMAGE_FROM_VOLUME_OVER_QUOTA: _(
"Failed to copy volume to image as image quota has been met. Please "
"delete images or have your limit increased, then try again."),
}
def get_message_text(event_id):
# FIXME(ameade): In the Ocata release, this check can be removed as
# there should no longer be any event ids that do not start with the prefix
if not event_id.startswith("VOLUME_"):
event_id = "VOLUME_" + event_id
return event_id_message_map[event_id]
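
Because unprefixed event ids may still exist in the database until the online migration has run, get_message_text() normalizes its argument before the lookup. A short usage sketch, assuming the cinder tree from this change is importable:

# Both the new prefixed id and a legacy unprefixed id resolve to the same
# user-facing text during the transition period.
from cinder.message import defined_messages

new_id = defined_messages.EventIds.UNABLE_TO_ALLOCATE      # 'VOLUME_000002'
assert (defined_messages.get_message_text(new_id) ==
        defined_messages.get_message_text('000002'))        # legacy form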

@@ -131,7 +131,7 @@ class ScheduleCreateVolumeTask(flow_utils.CinderTask):
if isinstance(e, exception.NoValidBackend):
self.message_api.create(
context,
defined_messages.UNABLE_TO_ALLOCATE,
defined_messages.EventIds.UNABLE_TO_ALLOCATE,
context.project_id,
resource_type=resource_types.VOLUME,
resource_uuid=request_spec['volume_id'])

@@ -31,7 +31,7 @@ DEFAULT_AZ = "fakeaz"
def fake_message(id, **kwargs):
message = {
'id': id,
'event_id': defined_messages.UNABLE_TO_ALLOCATE,
'event_id': defined_messages.EventIds.UNABLE_TO_ALLOCATE,
'message_level': "ERROR",
'request_id': FAKE_UUID,
'updated_at': datetime.datetime(1900, 1, 1, 1, 1, 1,

@@ -0,0 +1,42 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import iso8601
from cinder.message import defined_messages
from cinder.tests.unit import fake_constants as fake
FAKE_UUID = fake.OBJECT_ID
def stub_message(id, **kwargs):
message = {
'id': id,
'event_id': defined_messages.EventIds.UNABLE_TO_ALLOCATE,
'message_level': "ERROR",
'request_id': FAKE_UUID,
'updated_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
tzinfo=iso8601.iso8601.Utc()),
'created_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
tzinfo=iso8601.iso8601.Utc()),
'expires_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
tzinfo=iso8601.iso8601.Utc()),
}
message.update(kwargs)
return message
def stub_message_get(self, context, message_id):
return stub_message(message_id)

@@ -53,12 +53,12 @@ class MessageApiTest(test.TestCase):
'request_id': 'fakerequestid',
'resource_type': 'fake_resource_type',
'resource_uuid': None,
'event_id': defined_messages.UNABLE_TO_ALLOCATE,
'event_id': defined_messages.EventIds.UNABLE_TO_ALLOCATE,
'message_level': 'ERROR',
'expires_at': expected_expires_at,
}
self.message_api.create(self.ctxt,
defined_messages.UNABLE_TO_ALLOCATE,
defined_messages.EventIds.UNABLE_TO_ALLOCATE,
"fakeproject",
resource_type="fake_resource_type")
@@ -70,7 +70,7 @@ class MessageApiTest(test.TestCase):
self.mock_object(self.message_api.db, 'create',
side_effect=Exception())
self.message_api.create(self.ctxt,
defined_messages.UNABLE_TO_ALLOCATE,
defined_messages.EventIds.UNABLE_TO_ALLOCATE,
"fakeproject",
"fake_resource")
@@ -109,18 +109,20 @@ class MessageApiTest(test.TestCase):
def create_message_for_tests(self):
"""Create messages to test pagination functionality"""
utils.create_message(
self.ctxt, event_id=defined_messages.UNKNOWN_ERROR)
self.ctxt, event_id=defined_messages.EventIds.UNKNOWN_ERROR)
utils.create_message(
self.ctxt, event_id=defined_messages.UNABLE_TO_ALLOCATE)
self.ctxt, event_id=defined_messages.EventIds.UNABLE_TO_ALLOCATE)
utils.create_message(
self.ctxt, event_id=defined_messages.ATTACH_READONLY_VOLUME)
self.ctxt,
event_id=defined_messages.EventIds.ATTACH_READONLY_VOLUME)
utils.create_message(
self.ctxt, event_id=defined_messages.IMAGE_FROM_VOLUME_OVER_QUOTA)
self.ctxt,
event_id=defined_messages.EventIds.IMAGE_FROM_VOLUME_OVER_QUOTA)
def test_get_all_messages_with_limit(self):
self.create_message_for_tests()
url = ('/v3/messages?limit=1')
url = '/v3/messages?limit=1'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -131,7 +133,7 @@ class MessageApiTest(test.TestCase):
res = self.controller.index(req)
self.assertEqual(1, len(res['messages']))
url = ('/v3/messages?limit=3')
url = '/v3/messages?limit=3'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -145,7 +147,7 @@ class MessageApiTest(test.TestCase):
def test_get_all_messages_with_limit_wrong_version(self):
self.create_message_for_tests()
url = ('/v3/messages?limit=1')
url = '/v3/messages?limit=1'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -159,7 +161,7 @@ class MessageApiTest(test.TestCase):
def test_get_all_messages_with_offset(self):
self.create_message_for_tests()
url = ('/v3/messages?offset=1')
url = '/v3/messages?offset=1'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -173,7 +175,7 @@ class MessageApiTest(test.TestCase):
def test_get_all_messages_with_limit_and_offset(self):
self.create_message_for_tests()
url = ('/v3/messages?limit=2&offset=1')
url = '/v3/messages?limit=2&offset=1'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -187,8 +189,8 @@ class MessageApiTest(test.TestCase):
def test_get_all_messages_with_filter(self):
self.create_message_for_tests()
url = ('/v3/messages?'
'event_id=%s') % defined_messages.UNKNOWN_ERROR
url = '/v3/messages?event_id=%s' % (
defined_messages.EventIds.UNKNOWN_ERROR)
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -202,7 +204,7 @@ class MessageApiTest(test.TestCase):
def test_get_all_messages_with_sort(self):
self.create_message_for_tests()
url = ('/v3/messages?sort=event_id:asc')
url = '/v3/messages?sort=event_id:asc'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -212,10 +214,12 @@ class MessageApiTest(test.TestCase):
res = self.controller.index(req)
expect_result = [defined_messages.UNKNOWN_ERROR,
defined_messages.UNABLE_TO_ALLOCATE,
defined_messages.IMAGE_FROM_VOLUME_OVER_QUOTA,
defined_messages.ATTACH_READONLY_VOLUME]
expect_result = [
defined_messages.EventIds.UNKNOWN_ERROR,
defined_messages.EventIds.UNABLE_TO_ALLOCATE,
defined_messages.EventIds.IMAGE_FROM_VOLUME_OVER_QUOTA,
defined_messages.EventIds.ATTACH_READONLY_VOLUME
]
expect_result.sort()
self.assertEqual(4, len(res['messages']))
@@ -232,7 +236,7 @@ class MessageApiTest(test.TestCase):
self.create_message_for_tests()
# first request of this test
url = ('/v3/fake/messages?limit=2')
url = '/v3/fake/messages?limit=2'
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'
req.content_type = 'application/json'
@@ -252,7 +256,7 @@ class MessageApiTest(test.TestCase):
# Second request in this test
# Test for second page using marker (res['messages'][0]['id'])
# values fetched in first request with limit 2 in this test
url = ('/v3/fake/messages?limit=1&marker=%s') % (
url = '/v3/fake/messages?limit=1&marker=%s' % (
res['messages'][0]['id'])
req = fakes.HTTPRequest.blank(url)
req.method = 'GET'

@@ -0,0 +1,46 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from cinder.message import defined_messages
from cinder import test
CONF = cfg.CONF
class DefinedMessagesTest(test.TestCase):
def test_event_id_formats(self):
"""Assert all cinder event ids start with VOLUME_."""
for attr_name in dir(defined_messages.EventIds):
if not attr_name.startswith('_'):
value = getattr(defined_messages.EventIds, attr_name)
self.assertTrue(value.startswith('VOLUME_'))
def test_unique_event_ids(self):
"""Assert that no event_id is duplicated."""
event_ids = []
for attr_name in dir(defined_messages.EventIds):
if not attr_name.startswith('_'):
value = getattr(defined_messages.EventIds, attr_name)
event_ids.append(value)
self.assertEqual(len(event_ids), len(set(event_ids)))
def test_event_id_has_message(self):
for attr_name in dir(defined_messages.EventIds):
if not attr_name.startswith('_'):
value = getattr(defined_messages.EventIds, attr_name)
self.assertTrue(defined_messages.event_id_message_map.get(
value))
def test_event_id_missing_prefix(self):
self.assertTrue(defined_messages.get_message_text('000001'))

@@ -149,7 +149,7 @@ class SchedulerManagerTestCase(test.TestCase):
request_spec_obj, {})
_mock_message_create.assert_called_once_with(
self.context, defined_messages.UNABLE_TO_ALLOCATE,
self.context, defined_messages.EventIds.UNABLE_TO_ALLOCATE,
self.context.project_id, resource_type='VOLUME',
resource_uuid=volume.id)

@@ -1988,6 +1988,93 @@ class DBAPIMigrateCGstoGroupsTestCase(BaseTest):
self._assert_migrated(migrated, ())
class DBAPIMigrateMessagePrefixTestCase(BaseTest):
"""Tests for cinder.db.api.migrate_add_message_prefix."""
def setUp(self):
super(DBAPIMigrateMessagePrefixTestCase, self).setUp()
message_values = {
"project_id": "fake_project",
"event_id": "test_id",
"message_level": "ERROR",
"id": '1',
}
db.message_create(self.ctxt, message_values)
message_2_values = {
"project_id": "fake_project",
"event_id": "test_id",
"message_level": "ERROR",
"id": '2',
}
db.message_create(self.ctxt, message_2_values)
message_3_values = {
"project_id": "fake_project",
"event_id": "VOLUME_test_id",
"message_level": "ERROR",
"id": '3',
}
db.message_create(self.ctxt, message_3_values)
def tearDown(self):
super(DBAPIMigrateMessagePrefixTestCase, self).tearDown()
db.message_destroy(self.ctxt, {'id': '1'})
db.message_destroy(self.ctxt, {'id': '2'})
db.message_destroy(self.ctxt, {'id': '3'})
def _assert_migrated(self, migrated, not_migrated):
for message_id in migrated:
message = db.message_get(self.ctxt, message_id)
self.assertEqual('VOLUME_test_id', message['event_id'])
for message_id in not_migrated:
message = db.message_get(self.ctxt, message_id)
self.assertEqual('test_id', message['event_id'])
def test_migrate(self):
self._assert_migrated(['3'], ['1', '2'])
# Run migration
count_all, count_hit = db.migrate_add_message_prefix(self.ctxt, 50)
# Check counted entries
self.assertEqual(2, count_all)
self.assertEqual(2, count_hit)
self._assert_migrated(['1', '2', '3'], [])
def test_migrate_limit_force(self):
# Run first migration
count_all, count_hit = db.migrate_add_message_prefix(self.ctxt, 1,
True)
# Check counted entries
self.assertEqual(1, count_all)
self.assertEqual(1, count_hit)
self._assert_migrated(['1', '3'], ['2'])
# Run second migration
count_all, count_hit = db.migrate_add_message_prefix(self.ctxt, 2,
True)
# Check counted entries
self.assertEqual(1, count_all)
self.assertEqual(1, count_hit)
self._assert_migrated(['1', '2', '3'], [])
# Run final migration
count_all, count_hit = db.migrate_add_message_prefix(self.ctxt, 2,
True)
# Check counted entries
self.assertEqual(0, count_all)
self.assertEqual(0, count_hit)
class DBAPICgsnapshotTestCase(BaseTest):
"""Tests for cinder.db.api.cgsnapshot_*."""

@@ -114,6 +114,9 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
# NOTE(dulek): 73 drops tables and columns we've stopped using a
# release ago.
73,
# NOTE(ameade): 87 sets messages.request_id to nullable. This
# should be safe for the same reason as migration 66.
87,
]
# NOTE(dulek): We only started requiring things be additive in
@@ -1063,6 +1066,16 @@ class MigrationsMixin(test_migrations.WalkVersionsMixin):
self.assertIsNotNone(specs)
self.assertEqual('<is> True', specs.value)
def _check_087(self, engine, data):
"""Test request_id column in messages is nullable."""
self.assertTrue(engine.dialect.has_table(engine.connect(),
"messages"))
messages = db_utils.get_table(engine, 'messages')
self.assertIsInstance(messages.c.request_id.type,
self.VARCHAR_TYPE)
self.assertTrue(messages.c.request_id.nullable)
def test_walk_versions(self):
self.walk_versions(False, False)

@@ -2943,7 +2943,7 @@ class VolumeTestCase(base.BaseVolumeTestCase):
# Assert a user message was created
self.volume.message_api.create.assert_called_once_with(
self.context, defined_messages.ATTACH_READONLY_VOLUME,
self.context, defined_messages.EventIds.ATTACH_READONLY_VOLUME,
self.context.project_id, resource_type=resource_types.VOLUME,
resource_uuid=volume['id'])
@@ -5874,7 +5874,8 @@ class CopyVolumeToImageTestCase(base.BaseVolumeTestCase):
self.image_meta)
# Assert a user message was created
self.volume.message_api.create.assert_called_once_with(
self.context, defined_messages.IMAGE_FROM_VOLUME_OVER_QUOTA,
self.context,
defined_messages.EventIds.IMAGE_FROM_VOLUME_OVER_QUOTA,
self.context.project_id, resource_type=resource_types.VOLUME,
resource_uuid=volume['id'])

@@ -1028,7 +1028,7 @@ class VolumeManager(manager.CleanableManager,
fields.VolumeAttachStatus.ERROR_ATTACHING)
attachment.save()
self.message_api.create(
context, defined_messages.ATTACH_READONLY_VOLUME,
context, defined_messages.EventIds.ATTACH_READONLY_VOLUME,
context.project_id, resource_type=resource_types.VOLUME,
resource_uuid=volume.id)
raise exception.InvalidVolumeAttachMode(mode=mode,
@@ -1349,7 +1349,8 @@ class VolumeManager(manager.CleanableManager,
payload['message'] = six.text_type(error)
if isinstance(error, exception.ImageLimitExceeded):
self.message_api.create(
context, defined_messages.IMAGE_FROM_VOLUME_OVER_QUOTA,
context,
defined_messages.EventIds.IMAGE_FROM_VOLUME_OVER_QUOTA,
context.project_id,
resource_type=resource_types.VOLUME,
resource_uuid=volume_id)

@@ -23,7 +23,7 @@ Example message generation::
self.message_api.create(
context,
defined_messages.UNABLE_TO_ALLOCATE,
defined_messages.EventIds.UNABLE_TO_ALLOCATE,
project_id,
resource_type=resource_types.VOLUME,
resource_uuid=volume_id)