Merge pull request #475 from asadoughi/concurrent_subnet_alloc_pool_cache

Fix subnet_update_set_alloc_pool_cache for concurrent requests
John Perkins
2015-11-17 13:33:23 -06:00
2 changed files with 42 additions and 3 deletions
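
The race the fix targets: one request loads a subnet and later flushes an ORM attribute update on it, while a concurrent request deletes the same row; the flush-time UPDATE then matches zero rows and SQLAlchemy raises StaleDataError. Below is a minimal sketch of that failure mode and of the bulk Query.update() alternative the patch switches to, written against plain SQLAlchemy with an in-memory SQLite database; the Subnet model, ids, and session handling are illustrative stand-ins, not quark's actual code.

# A minimal sketch of the failure mode, assuming plain SQLAlchemy and an
# in-memory SQLite database; the model and names below are illustrative,
# not quark's.
from sqlalchemy import Column, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import exc, sessionmaker

Base = declarative_base()


class Subnet(Base):
    __tablename__ = "subnets"
    id = Column(String(36), primary_key=True)
    _allocation_pool_cache = Column(String(255))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

# Request A creates and loads the subnet through its own session.
session_a = Session()
session_a.add(Subnet(id="subnet-1"))
session_a.commit()
subnet = session_a.query(Subnet).filter(Subnet.id == "subnet-1").one()

# A concurrent request B deletes the row out from under request A.
session_b = Session()
session_b.query(Subnet).filter(Subnet.id == "subnet-1").delete()
session_b.commit()

# Old approach: ORM attribute update. flush() emits an UPDATE that is
# expected to match exactly one row; it matches zero, so SQLAlchemy
# raises StaleDataError.
subnet._allocation_pool_cache = '{"foo": "bar"}'
try:
    session_a.flush()
except exc.StaleDataError:
    session_a.rollback()

# New approach: bulk Query.update(). It bypasses the unit-of-work rowcount
# check and just returns how many rows matched, so a concurrently deleted
# subnet yields row_count == 0 instead of an exception.
row_count = (session_a.query(Subnet)
             .filter(Subnet.id == "subnet-1")
             .update({"_allocation_pool_cache": '{"foo": "bar"}'},
                     synchronize_session=False))
print(row_count)  # prints 0

Running the sketch prints 0: the bulk update simply reports that nothing matched, where the ORM flush would have raised.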


@@ -701,9 +701,13 @@ def subnet_update_set_full(context, subnet):
 def subnet_update_set_alloc_pool_cache(context, subnet, cache_data=None):
     if cache_data is not None:
         cache_data = json.dumps(cache_data)
-    subnet["_allocation_pool_cache"] = cache_data
-    subnet = subnet_update(context, subnet)
-    LOG.debug("Setting alloc pool cache to %s" % cache_data)
+    update_kwargs = {"_allocation_pool_cache": cache_data}
+    query = context.session.query(models.Subnet)
+    query = query.filter(models.Subnet.id == subnet.id)
+    row_count = query.update(update_kwargs,
+                             update_args={"mysql_limit": 1})
+    LOG.debug("Setting alloc pool cache to %s (row_count: %s)" % (
+        cache_data, row_count))
     return subnet
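
Design note on the replacement: Query.update() emits a single UPDATE statement and returns the number of rows it matched, so a subnet deleted by a concurrent request simply comes back as row_count = 0 instead of tripping the flush-time rowcount check. The update_args={"mysql_limit": 1} argument forwards a MySQL-specific LIMIT 1 to the underlying UPDATE construct, matching the regression test's placement in the functional MySQL suite below.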


@@ -0,0 +1,35 @@
+from sqlalchemy.orm import exc
+
+from quark.db import api as db_api
+from quark.tests.functional.mysql.base import MySqlBaseFunctionalTest
+
+
+class TestSubnetsAllocationPoolCache(MySqlBaseFunctionalTest):
+    def setUp(self):
+        super(TestSubnetsAllocationPoolCache, self).setUp()
+
+    def test_subnet_update_set_alloc_pool_cache_concurrency(self):
+        subnet = {"cidr": "192.168.10.0/24"}
+        subnet_db = db_api.subnet_create(self.context, **subnet)
+        self.context.session.flush()
+
+        # establish second session
+        old_session = self.context.session
+        self.context._session = None
+        subnet_to_delete = db_api.subnet_find(
+            self.context, id=subnet_db.id, scope=db_api.ONE)
+        db_api.subnet_delete(self.context, subnet_to_delete)
+        self.context.session.flush()
+
+        # restore first session
+        self.context._session = old_session
+
+        try:
+            db_api.subnet_update_set_alloc_pool_cache(
+                self.context, subnet_db, {"foo": "bar"})
+            self.context.session.flush()
+        except exc.StaleDataError as e:
+            self.fail("Did not expect StaleDataError exception: {0}".format(e))
+
+        self.assertEqual(subnet_db["_allocation_pool_cache"],
+                         "{\"foo\": \"bar\"}")