Merge "Retry on DBDeadlock"
commit 7642d1dba0
@@ -16,6 +16,7 @@ import datetime
 import sys
 
 from oslo_config import cfg
+from oslo_db import api as oslo_db_api
 from oslo_db.sqlalchemy import session as db_session
 from oslo_db.sqlalchemy import utils
 from oslo_serialization import jsonutils
@@ -550,6 +551,8 @@ def stack_delete(context, stack_id):
     session.flush()
 
 
+@oslo_db_api.wrap_db_retry(max_retries=3, retry_on_deadlock=True,
+                           retry_interval=0.5, inc_retry_interval=True)
 def stack_lock_create(stack_id, engine_id):
     session = get_session()
     with session.begin():
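
Note on the decorator: `oslo_db_api.wrap_db_retry` re-invokes the wrapped function whenever it raises `DBDeadlock` (because `retry_on_deadlock=True`), sleeping between attempts and doubling the interval each time since `inc_retry_interval=True`. With `max_retries=3`, the function body runs at most four times (one initial attempt plus three retries) before the last `DBDeadlock` is re-raised to the caller. A minimal standalone sketch of that behaviour, using a made-up `flaky_insert` function instead of Heat's session code:

# Hedged sketch, not part of the commit: shows wrap_db_retry retrying a
# function that deadlocks twice and then succeeds.
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exception

attempts = []


@oslo_db_api.wrap_db_retry(max_retries=3, retry_on_deadlock=True,
                           retry_interval=0.5, inc_retry_interval=True)
def flaky_insert():
    attempts.append(1)
    if len(attempts) < 3:
        raise db_exception.DBDeadlock()   # first two attempts deadlock
    return 'inserted'                     # third attempt succeeds


print(flaky_insert())   # -> 'inserted', after sleeping 0.5s and then 1.0s
print(len(attempts))    # -> 3
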
@@ -1154,6 +1157,8 @@ def sync_point_delete_all_by_stack_and_traversal(context, stack_id,
     return rows_deleted
 
 
+@oslo_db_api.wrap_db_retry(max_retries=3, retry_on_deadlock=True,
+                           retry_interval=0.5, inc_retry_interval=True)
 def sync_point_create(context, values):
     values['entity_id'] = str(values['entity_id'])
     sync_point_ref = models.SyncPoint()
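
`sync_point_create` gets the same decorator with the same parameters. Conceptually the decorator amounts to a retry loop like the one below (a simplified illustration of the semantics, not oslo.db's actual implementation):

import time

from oslo_db import exception as db_exception


def retry_on_deadlock(func, max_retries=3, retry_interval=0.5,
                      inc_retry_interval=True, max_retry_interval=10):
    # Simplified stand-in for oslo_db.api.wrap_db_retry.
    def wrapper(*args, **kwargs):
        interval = retry_interval
        retries_left = max_retries
        while True:
            try:
                return func(*args, **kwargs)
            except db_exception.DBDeadlock:
                if retries_left <= 0:
                    raise                  # out of retries: propagate
                time.sleep(interval)
                if inc_retry_interval:
                    interval = min(interval * 2, max_retry_interval)
                retries_left -= 1
    return wrapper
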
@@ -13,11 +13,13 @@
 
 import datetime
 import json
+import time
 import uuid
 
 import mock
 import mox
 from oslo_config import cfg
+from oslo_db import exception as db_exception
 from oslo_utils import timeutils
 import six
 
@@ -2201,6 +2203,14 @@ class DBAPIStackLockTest(common.HeatTestCase):
         observed = db_api.stack_lock_release(self.stack.id, UUID2)
         self.assertTrue(observed)
 
+    @mock.patch.object(time, 'sleep')
+    def test_stack_lock_retry_on_deadlock(self, sleep):
+        with mock.patch('sqlalchemy.orm.Session.add',
+                        side_effect=db_exception.DBDeadlock) as mock_add:
+            self.assertRaises(db_exception.DBDeadlock,
+                              db_api.stack_lock_create, self.stack.id, UUID1)
+            self.assertEqual(4, mock_add.call_count)
+
 
 class DBAPIResourceDataTest(common.HeatTestCase):
     def setUp(self):
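
In this test, `sqlalchemy.orm.Session.add` is patched to raise `DBDeadlock` on every call, and `time.sleep` is patched out so the growing retry interval does not slow the test suite down. Because `max_retries=3`, the decorated `stack_lock_create` makes one initial attempt plus three retries before the exception finally escapes, so `mock_add.call_count` is 4. The same pattern in isolation, against a hypothetical decorated helper `insert_row` (not part of the commit):

import time

import mock  # the external mock library used by Heat's tests
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exception


@oslo_db_api.wrap_db_retry(max_retries=3, retry_on_deadlock=True,
                           retry_interval=0.5, inc_retry_interval=True)
def insert_row(session, row):
    session.add(row)


@mock.patch.object(time, 'sleep')  # keep the back-off from really sleeping
def check_retry_count(sleep):
    session = mock.Mock()
    session.add.side_effect = db_exception.DBDeadlock
    try:
        insert_row(session, object())
    except db_exception.DBDeadlock:
        pass
    assert session.add.call_count == 4   # 1 attempt + 3 retries


check_retry_count()
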
@@ -2835,6 +2845,18 @@ class DBAPISyncPointTest(common.HeatTestCase):
         )
         self.assertIsNone(ret_sync_point_stack)
 
+    @mock.patch.object(time, 'sleep')
+    def test_syncpoint_create_deadlock(self, sleep):
+        with mock.patch('sqlalchemy.orm.Session.add',
+                        side_effect=db_exception.DBDeadlock) as add:
+            for res in self.resources:
+                self.assertRaises(db_exception.DBDeadlock,
+                                  create_sync_point,
+                                  self.ctx, entity_id=str(res.id),
+                                  stack_id=self.stack.id,
+                                  traversal_id=self.stack.current_traversal)
+            self.assertEqual(len(self.resources) * 4, add.call_count)
+
 
 class DBAPICryptParamsPropsTest(common.HeatTestCase):
     def setUp(self):
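
The sync point variant repeats the check once per resource: each `create_sync_point` call hits the patched `Session.add` once initially and three more times on retry, so the expected total is `len(self.resources) * 4`. The arithmetic, spelled out with a made-up resource count:

max_retries = 3
attempts_per_call = 1 + max_retries   # initial attempt + retries = 4
num_resources = 3                     # illustrative value only
assert num_resources * attempts_per_call == 12
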