Merge "Update Duplicate entry exceptions to provide more information"

Zuul 2018-04-13 21:57:21 +00:00 committed by Gerrit Code Review
commit 4abae22742
3 changed files with 103 additions and 29 deletions

@@ -367,9 +367,10 @@ def create_workbook(values, session=None):
     try:
         wb.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for WorkbookDefinition: %s" % e.columns
+            "Duplicate entry for WorkbookDefinition ['name', 'project_id']: "
+            "{}, {}".format(wb.name, wb.project_id)
         )

     return wb
@@ -487,11 +488,11 @@ def create_workflow_definition(values, session=None):
     try:
         wf_def.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for WorkflowDefinition: %s" % e.columns
-        )
+            "Duplicate entry for WorkflowDefinition ['name', 'namespace',"
+            " 'project_id']: {}, {}, {}".format(wf_def.name, wf_def.namespace,
+                                                wf_def.project_id))

     return wf_def
@@ -620,9 +621,10 @@ def create_action_definition(values, session=None):
     try:
         a_def.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for action %s: %s" % (a_def.name, e.columns)
+            "Duplicate entry for Action ['name', 'project_id']:"
+            " {}, {}".format(a_def.name, a_def.project_id)
         )

     return a_def
@@ -691,7 +693,7 @@ def create_action_execution(values, session=None):
         a_ex.save(session=session)
     except db_exc.DBDuplicateEntry as e:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for ActionExecution: %s" % e.columns
+            "Duplicate entry for ActionExecution ID: {}".format(e.value)
         )

     return a_ex
@@ -774,7 +776,7 @@ def create_workflow_execution(values, session=None):
         wf_ex.save(session=session)
     except db_exc.DBDuplicateEntry as e:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for WorkflowExecution with ID {value} ".format(
+            "Duplicate entry for WorkflowExecution with ID: {value} ".format(
                 value=e.value
             )
         )
@@ -916,7 +918,7 @@ def create_task_execution(values, session=None):
         task_ex.save(session=session)
     except db_exc.DBDuplicateEntry as e:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for TaskExecution: %s" % e.columns
+            "Duplicate entry for TaskExecution ID: {}".format(e.value)
         )

     return task_ex
@@ -972,7 +974,7 @@ def create_delayed_call(values, session=None):
         delayed_call.save(session)
     except db_exc.DBDuplicateEntry as e:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for DelayedCall: %s" % e.columns
+            "Duplicate entry for DelayedCall ID: {}".format(e.value)
        )

     return delayed_call
@@ -1157,10 +1159,10 @@ def create_cron_trigger(values, session=None):
     try:
         cron_trigger.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for cron trigger %s: %s"
-            % (cron_trigger.name, e.columns)
+            "Duplicate entry for cron trigger ['name', 'project_id']: "
+            "{}, {}".format(cron_trigger.name, cron_trigger.project_id)
         )
     # TODO(nmakhotkin): Remove this 'except' after fixing
     # https://bugs.launchpad.net/oslo.db/+bug/1458583.
@@ -1270,9 +1272,10 @@ def create_environment(values, session=None):
     try:
         env.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for Environment: %s" % e.columns
+            "Duplicate entry for Environment ['name', 'project_id']:"
+            " {}, {}".format(env.name, env.project_id)
         )

     return env
@@ -1358,9 +1361,13 @@ def create_resource_member(values, session=None):
     try:
         res_member.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for ResourceMember: %s" % e.columns
+            "Duplicate entry for ResourceMember ['resource_id',"
+            " 'resource_type', 'member_id']: {}, {}, "
+            "{}".format(res_member.resource_id,
+                        res_member.resource_type,
+                        res_member.member_id)
         )

     return res_member
@@ -1503,11 +1510,15 @@ def create_event_trigger(values, session=None):
     try:
         event_trigger.save(session=session)
-    except db_exc.DBDuplicateEntry as e:
+    except db_exc.DBDuplicateEntry:
         raise exc.DBDuplicateEntryError(
-            "Duplicate entry for event trigger %s: %s"
-            % (event_trigger.id, e.columns)
-        )
+            "Duplicate entry for EventTrigger ['exchange', 'topic',"
+            " 'event', 'workflow_id', 'project_id']:"
+            " {}, {}, {}, {}, {}".format(event_trigger.exchange,
+                                         event_trigger.topic,
+                                         event_trigger.event,
+                                         event_trigger.workflow_id,
+                                         event_trigger.project_id))
     # TODO(nmakhotkin): Remove this 'except' after fixing
     # https://bugs.launchpad.net/oslo.db/+bug/1458583.
     except db_exc.DBError as e:
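
All of the hunks above apply the same pattern: instead of reporting oslo.db's e.columns (whose contents depend on the database backend), each create_* function now builds the message from the unique-key fields of the object it just tried to save, while the execution, task and delayed-call variants report e.value (the duplicated ID). Below is a minimal sketch of that pattern; Widget and create_widget are hypothetical stand-ins, not Mistral code, and the imports mirror the ones the diff appears to rely on.

    from oslo_db import exception as db_exc

    from mistral import exceptions as exc


    def create_widget(widget, session=None):
        """Sketch of the duplicate-entry handling used by the create_* functions."""
        try:
            # Persist the object; a unique-constraint violation surfaces as
            # oslo.db's DBDuplicateEntry.
            widget.save(session=session)
        except db_exc.DBDuplicateEntry:
            # Build the message from the fields forming the unique key of the
            # hypothetical Widget model, rather than from the
            # backend-dependent e.columns attribute.
            raise exc.DBDuplicateEntryError(
                "Duplicate entry for Widget ['name', 'project_id']: "
                "{}, {}".format(widget.name, widget.project_id)
            )

        return widget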

@@ -108,6 +108,21 @@ class BaseTest(base.BaseTestCase):
         # Added for convenience (to avoid unnecessary imports).
         register_action_class(name, cls, attributes, desc)

+    def assertRaisesWithMessage(self, exception, msg, func, *args, **kwargs):
+        try:
+            func(*args, **kwargs)
+            self.assertFail()
+        except exception as e:
+            self.assertEqual(msg, e.message)
+
+    def assertRaisesWithMessageContaining(self, exception, msg, func, *args,
+                                          **kwargs):
+        try:
+            func(*args, **kwargs)
+            self.assertFail()
+        except exception as e:
+            self.assertIn(msg, e.message)
+
     def assertListEqual(self, l1, l2):
         if tuple(sys.version_info)[0:2] < (2, 7):
             # for python 2.6 compatibility
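
The two helpers added to BaseTest let the tests below assert on the exception text itself: assertRaisesWithMessage expects the full message, while assertRaisesWithMessageContaining only checks a substring, which is useful when part of the message (such as a generated ID) is not predictable. A rough usage sketch follows, assuming the helpers live on the BaseTest class shown above and that the exception exposes a message attribute, as Mistral's exceptions do; boom, HelperUsageTest and the module path in the import are hypothetical.

    from mistral import exceptions as exc
    from mistral.tests.unit import base  # assumed module path for BaseTest


    def boom():
        # Hypothetical function that always fails with a duplicate-entry error.
        raise exc.DBDuplicateEntryError("Duplicate entry for Widget: w1")


    class HelperUsageTest(base.BaseTest):
        def test_exact_message(self):
            # Passes only if the message matches exactly.
            self.assertRaisesWithMessage(
                exc.DBDuplicateEntryError,
                "Duplicate entry for Widget: w1",
                boom
            )

        def test_message_substring(self):
            # Passes as long as the fragment appears somewhere in the message.
            self.assertRaisesWithMessageContaining(
                exc.DBDuplicateEntryError,
                "Duplicate entry for Widget",
                boom
            )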

@@ -88,8 +88,10 @@ class WorkbookTest(SQLAlchemyTest):
         cfg.CONF.set_default('auth_enable', False, group='pecan')

         db_api.create_workbook(WORKBOOKS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for WorkbookDefinition ['name', 'project_id']:"
+            " my_workbook1, <default-project>",
             db_api.create_workbook,
             WORKBOOKS[0]
         )
@@ -415,6 +417,17 @@ WF_DEFINITIONS = [
         'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0),
         'namespace': ''
     },
+    {
+        'name': 'my_wf3',
+        'definition': 'empty',
+        'spec': {},
+        'tags': ['mc'],
+        'scope': 'private',
+        'project_id': '1233',
+        'trust_id': '12345',
+        'created_at': datetime.datetime(2016, 12, 1, 15, 1, 0),
+        'namespace': 'mynamespace'
+    },
 ]
@@ -602,12 +615,26 @@ class WorkflowDefinitionTest(SQLAlchemyTest):
         cfg.CONF.set_default('auth_enable', False, group='pecan')

         db_api.create_workflow_definition(WF_DEFINITIONS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for WorkflowDefinition ['name', 'namespace',"
+            " 'project_id']: my_wf1, , <default-project>",
             db_api.create_workflow_definition,
             WF_DEFINITIONS[0]
         )

+    def test_create_workflow_definition_duplicate_namespace_without_auth(self):
+        cfg.CONF.set_default('auth_enable', False, group='pecan')
+
+        db_api.create_workflow_definition(WF_DEFINITIONS[2])
+
+        self.assertRaisesWithMessage(
+            exc.DBDuplicateEntryError,
+            "Duplicate entry for WorkflowDefinition ['name', 'namespace',"
+            " 'project_id']: my_wf3, mynamespace, <default-project>",
+            db_api.create_workflow_definition,
+            WF_DEFINITIONS[2]
+        )
+
     def test_update_workflow_definition(self):
         created = db_api.create_workflow_definition(WF_DEFINITIONS[0])
@@ -989,8 +1016,10 @@ class ActionDefinitionTest(SQLAlchemyTest):
         cfg.CONF.set_default('auth_enable', False, group='pecan')

         db_api.create_action_definition(ACTION_DEFINITIONS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for Action ['name', 'project_id']: action1"
+            ", <default-project>",
             db_api.create_action_definition,
             ACTION_DEFINITIONS[0]
         )
@@ -2274,8 +2303,10 @@ class CronTriggerTest(SQLAlchemyTest):
         cfg.CONF.set_default('auth_enable', False, group='pecan')

         db_api.create_cron_trigger(CRON_TRIGGERS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for cron trigger ['name', 'project_id']:"
+            " trigger1, <default-project>",
             db_api.create_cron_trigger,
             CRON_TRIGGERS[0]
         )
@@ -2506,8 +2537,10 @@ class EnvironmentTest(SQLAlchemyTest):
         cfg.CONF.set_default('auth_enable', False, group='pecan')

         db_api.create_environment(ENVIRONMENTS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for Environment ['name', 'project_id']: "
+            "env1, None",
             db_api.create_environment,
             ENVIRONMENTS[0]
         )
@@ -2787,8 +2820,11 @@ class ResourceMemberTest(SQLAlchemyTest):
     def test_create_resource_member_duplicate(self):
         db_api.create_resource_member(RESOURCE_MEMBERS[0])

-        self.assertRaises(
+        self.assertRaisesWithMessage(
             exc.DBDuplicateEntryError,
+            "Duplicate entry for ResourceMember ['resource_id',"
+            " 'resource_type', 'member_id']:"
+            " 123e4567-e89b-12d3-a456-426655440000, workflow, 99-88-33",
             db_api.create_resource_member,
             RESOURCE_MEMBERS[0]
         )
@@ -3060,6 +3096,18 @@ class EventTriggerTest(SQLAlchemyTest):
         self.assertEqual(created, fetched)

+    def test_create_event_trigger_duplicate(self):
+        db_api.create_event_trigger(EVENT_TRIGGERS[0])
+
+        self.assertRaisesWithMessageContaining(
+            exc.DBDuplicateEntryError,
+            "Duplicate entry for EventTrigger ['exchange', 'topic', 'event',"
+            " 'workflow_id', 'project_id']: openstack, notification,"
+            " compute.create_instance,",
+            db_api.create_event_trigger,
+            EVENT_TRIGGERS[0]
+        )
+
     def test_get_event_triggers_not_insecure(self):
         for t in EVENT_TRIGGERS:
             db_api.create_event_trigger(t)