Fix scenario test issues with immutable LBs
Wait for the loadbalancer to become ACTIVE after deleting a healthmonitor or a listener, to ensure the next test uses a LB in a non-transitional state. This also fixes the cidrs tests that were using the wrong waiter timeouts. This patch also moves individual deletion of child resources in class tear down to a more performant approach by cascade-deleting the load balancer. Story: 2008219 Task: 41008 Depends-On: https://review.opendev.org/#/c/757604/ Depends-On: https://review.opendev.org/#/c/757840/ Depends-On: https://review.opendev.org/#/c/757841/ Depends-On: https://review.opendev.org/#/c/757842/ Co-Authored-By: Carlos Goncalves <cgoncalves@redhat.com> Change-Id: I6a4eed7269e4f502bd0fc8613cb4ec4da13890e7
This commit is contained in:
parent
67249477d8
commit
3bc1f4decd
|
@ -320,7 +320,8 @@ class BaseLBaaSClient(rest_client.RestClient):
|
|||
self.expected_success(204, response.status)
|
||||
return response.status
|
||||
|
||||
def _cleanup_obj(self, obj_id, lb_client=None, lb_id=None, parent_id=None):
|
||||
def _cleanup_obj(self, obj_id, lb_client=None, lb_id=None, parent_id=None,
|
||||
cascade=False):
|
||||
"""Clean up an object (for use in tempest addClassResourceCleanup).
|
||||
|
||||
We always need to wait for the parent LB to be in a mutable state
|
||||
|
@ -343,6 +344,8 @@ class BaseLBaaSClient(rest_client.RestClient):
|
|||
loadbalancer client already.
|
||||
:param lb_id: (Optional) The ID of the parent loadbalancer, if the main
|
||||
obj_id is for a sub-object and not a loadbalancer.
|
||||
:param cascade: If true will delete all child objects of an
|
||||
object, if that object supports it.
|
||||
:return:
|
||||
"""
|
||||
if parent_id:
|
||||
|
@ -380,8 +383,8 @@ class BaseLBaaSClient(rest_client.RestClient):
|
|||
waiters.wait_for_status(wait_func, wait_id,
|
||||
const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
self.build_interval,
|
||||
self.timeout)
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
except exceptions.UnexpectedResponseCode:
|
||||
# Status is ERROR, go ahead with deletion
|
||||
LOG.debug("Found %s %s in ERROR status, proceeding with cleanup.",
|
||||
|
@ -400,7 +403,10 @@ class BaseLBaaSClient(rest_client.RestClient):
|
|||
LOG.error("Cleanup encountered an unknown exception while waiting "
|
||||
"for %s %s: %s", wait_client.root_tag, wait_id, e)
|
||||
|
||||
uri = '{0}/{1}'.format(uri, obj_id)
|
||||
if cascade:
|
||||
uri = '{0}/{1}?cascade=true'.format(uri, obj_id)
|
||||
else:
|
||||
uri = '{0}/{1}'.format(uri, obj_id)
|
||||
LOG.info("Cleaning up %s %s...", self.root_tag, obj_id)
|
||||
return_status = test_utils.call_and_ignore_notfound_exc(
|
||||
self.delete, uri)
|
||||
|
@ -411,8 +417,8 @@ class BaseLBaaSClient(rest_client.RestClient):
|
|||
waiters.wait_for_status(wait_func, wait_id,
|
||||
const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
self.build_interval,
|
||||
self.timeout)
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
else:
|
||||
LOG.info("Waiting for %s %s to be DELETED...",
|
||||
wait_client.root_tag, wait_id)
|
||||
|
|
|
@ -54,7 +54,7 @@ class AmphoraAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -48,7 +48,7 @@ class HealthMonitorAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -45,7 +45,7 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -62,10 +62,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.listener_id = listener[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener,
|
||||
cls.listener_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -83,10 +79,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
|
||||
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
|
||||
cls.pool_id = pool[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool,
|
||||
cls.pool_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -150,10 +142,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
**l7policy_kwargs)
|
||||
|
||||
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_l7policy_client.cleanup_l7policy,
|
||||
l7policy[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -560,10 +548,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
|
||||
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_l7policy_client.cleanup_l7policy,
|
||||
l7policy[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -684,10 +668,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
})
|
||||
|
||||
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_l7policy_client.cleanup_l7policy,
|
||||
l7policy[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -832,10 +812,6 @@ class L7PolicyAPITest(test_base.LoadBalancerBaseTest):
|
|||
const.ACTION: const.REJECT,
|
||||
}
|
||||
l7policy = self.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_l7policy_client.cleanup_l7policy,
|
||||
l7policy[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer,
|
||||
|
|
|
@ -47,7 +47,7 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -152,10 +152,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
**listener_kwargs)
|
||||
|
||||
listener = self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -272,8 +268,7 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
|
||||
try:
|
||||
listener = self.mem_listener_client.create_listener(
|
||||
**listener_kwargs)
|
||||
self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
except exceptions.BadRequest as e:
|
||||
faultstring = e.resp_body.get('faultstring', '')
|
||||
if ("Invalid input for field/attribute protocol." in faultstring
|
||||
|
@ -281,11 +276,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
raise self.skipException("Skipping unsupported protocol")
|
||||
raise e
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -305,8 +295,7 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
|
||||
try:
|
||||
listener2 = self.mem_listener_client.create_listener(
|
||||
**listener2_kwargs)
|
||||
self.mem_listener_client.create_listener(**listener2_kwargs)
|
||||
except exceptions.BadRequest as e:
|
||||
faultstring = e.resp_body.get('faultstring', '')
|
||||
if ("Invalid input for field/attribute protocol." in faultstring
|
||||
|
@ -314,11 +303,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
raise self.skipException("Skipping unsupported protocol")
|
||||
raise e
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener2[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -741,10 +725,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
listener_kwargs.update({const.ALLOWED_CIDRS: self.allowed_cidrs})
|
||||
|
||||
listener = self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -910,10 +890,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
listener_kwargs.update({const.ALLOWED_CIDRS: self.allowed_cidrs})
|
||||
|
||||
listener = self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -1138,10 +1114,6 @@ class ListenerAPITest(test_base.LoadBalancerBaseTest):
|
|||
const.LOADBALANCER_ID: self.lb_id,
|
||||
}
|
||||
listener = self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer,
|
||||
|
|
|
@ -61,7 +61,7 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -103,9 +103,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
cls.current_listener_port += 1
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener, listener[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -136,10 +133,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool, pool[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
|
@ -901,11 +894,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
|
||||
member = self.mem_member_client.create_member(**member_kwargs)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -1716,11 +1704,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
|
||||
member = self.mem_member_client.create_member(**member_kwargs)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -2173,11 +2156,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
|
||||
member = self.mem_member_client.create_member(**member_kwargs)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -2596,9 +2574,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
pool_id = pool[const.ID]
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool, pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id,
|
||||
|
@ -2631,11 +2606,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
const.ID]
|
||||
member1 = self.mem_member_client.create_member(**member1_kwargs)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member1[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id,
|
||||
const.PROVISIONING_STATUS,
|
||||
|
@ -2667,10 +2637,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
const.ID]
|
||||
|
||||
member2 = self.mem_member_client.create_member(**member2_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member2[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id,
|
||||
|
@ -2734,11 +2700,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
pool_id,
|
||||
query_params='{sort}={port}:{asc}'.format(
|
||||
sort=const.SORT, port=const.PROTOCOL_PORT, asc=const.ASC))
|
||||
for m in members:
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
m[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
# We should have two members: member2 and member3, in that order
|
||||
self.assertEqual(2, len(members))
|
||||
|
@ -2939,10 +2900,6 @@ class MemberAPITest(test_base.LoadBalancerBaseTest):
|
|||
const.PROTOCOL_PORT: self.member_port.increment(),
|
||||
}
|
||||
member = self.mem_member_client.create_member(**member_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member[const.ID], pool_id=pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer,
|
||||
|
|
|
@ -46,7 +46,7 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -358,10 +358,6 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
listener = self.mem_listener_client.create_listener(
|
||||
**listener_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -428,11 +424,6 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool,
|
||||
pool[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -1027,11 +1018,6 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool,
|
||||
pool[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -1276,11 +1262,6 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool,
|
||||
pool[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -1619,11 +1600,6 @@ class PoolAPITest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool,
|
||||
pool[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -186,7 +186,7 @@ class TLSWithBarbicanTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -219,10 +219,6 @@ class TLSWithBarbicanTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
}
|
||||
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
|
||||
cls.pool_id = pool[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool,
|
||||
cls.pool_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -242,12 +238,7 @@ class TLSWithBarbicanTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
if cls.lb_member_1_subnet:
|
||||
member1_kwargs[const.SUBNET_ID] = cls.lb_member_1_subnet[const.ID]
|
||||
|
||||
member1 = cls.mem_member_client.create_member(
|
||||
**member1_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_member_client.cleanup_member,
|
||||
member1[const.ID], pool_id=cls.pool_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
cls.mem_member_client.create_member(**member1_kwargs)
|
||||
waiters.wait_for_status(
|
||||
cls.mem_lb_client.show_loadbalancer, cls.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -266,12 +257,7 @@ class TLSWithBarbicanTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
if cls.lb_member_2_subnet:
|
||||
member2_kwargs[const.SUBNET_ID] = cls.lb_member_2_subnet[const.ID]
|
||||
|
||||
member2 = cls.mem_member_client.create_member(
|
||||
**member2_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_member_client.cleanup_member,
|
||||
member2[const.ID], pool_id=cls.pool_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
cls.mem_member_client.create_member(**member2_kwargs)
|
||||
waiters.wait_for_status(
|
||||
cls.mem_lb_client.show_loadbalancer, cls.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
|
|
@ -45,7 +45,7 @@ class HealthMonitorScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -268,9 +268,6 @@ class HealthMonitorScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
const.LOADBALANCER_ID: self.lb_id,
|
||||
}
|
||||
pool = self.mem_pool_client.create_pool(**pool_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool, pool[const.ID],
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -297,9 +294,6 @@ class HealthMonitorScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
const.EXPECTED_CODES: '200'})
|
||||
|
||||
hm = self.mem_healthmonitor_client.create_healthmonitor(**hm_kwargs)
|
||||
self.addCleanup(
|
||||
self.mem_healthmonitor_client.cleanup_healthmonitor,
|
||||
hm[const.ID], lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
|
@ -382,3 +376,9 @@ class HealthMonitorScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
const.PROVISIONING_STATUS,
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
|
|
@ -61,7 +61,7 @@ class IPv6TrafficOperationsScenarioTest(
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
cls.lb_vip_address = lb[const.VIP_ADDRESS]
|
||||
|
||||
|
@ -103,10 +103,6 @@ class IPv6TrafficOperationsScenarioTest(
|
|||
}
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.listener_ids[protocol] = listener[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener,
|
||||
cls.listener_ids[protocol],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -123,10 +119,6 @@ class IPv6TrafficOperationsScenarioTest(
|
|||
}
|
||||
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
|
||||
cls.pool_ids[protocol] = pool[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool,
|
||||
cls.pool_ids[protocol],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -43,7 +43,7 @@ class L7PolicyScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -60,10 +60,6 @@ class L7PolicyScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.listener_id = listener[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener,
|
||||
cls.listener_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -80,10 +76,6 @@ class L7PolicyScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
|
||||
cls.pool_id = pool[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool,
|
||||
cls.pool_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -43,7 +43,7 @@ class L7RuleScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -60,10 +60,6 @@ class L7RuleScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.listener_id = listener[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener,
|
||||
cls.listener_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -79,10 +75,6 @@ class L7RuleScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
l7policy = cls.mem_l7policy_client.create_l7policy(**l7policy_kwargs)
|
||||
cls.l7policy_id = l7policy[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_l7policy_client.cleanup_l7policy,
|
||||
cls.l7policy_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -46,7 +46,7 @@ class ListenerScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -75,9 +75,6 @@ class ListenerScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
pool1 = cls.mem_pool_client.create_pool(**pool1_kwargs)
|
||||
pool1_id = pool1[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool, pool1_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -94,9 +91,6 @@ class ListenerScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
pool2 = cls.mem_pool_client.create_pool(**pool2_kwargs)
|
||||
pool2_id = pool2[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool, pool2_id,
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -406,16 +400,16 @@ class ListenerScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
self.assertEqual(expected_cidrs, listener[const.ALLOWED_CIDRS])
|
||||
|
||||
# Listener delete
|
||||
self.mem_listener_client.delete_listener(listener[const.ID])
|
||||
waiters.wait_for_deleted_status_or_not_found(
|
||||
self.mem_listener_client.show_listener, listener[const.ID],
|
||||
const.PROVISIONING_STATUS,
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
self.mem_listener_client.delete_listener(listener[const.ID])
|
||||
|
||||
waiters.wait_for_deleted_status_or_not_found(
|
||||
self.mem_listener_client.show_listener, listener[const.ID],
|
||||
const.PROVISIONING_STATUS,
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
|
|
@ -64,7 +64,7 @@ class MemberScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -107,9 +107,6 @@ class MemberScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
}
|
||||
cls.current_listener_port += 1
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener, listener[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -136,10 +133,6 @@ class MemberScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
message = e.resp_body.get('faultstring', message)
|
||||
raise testtools.TestCase.skipException(message)
|
||||
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool, pool[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
|
|
|
@ -45,7 +45,7 @@ class PoolScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -358,9 +358,6 @@ class PoolScenarioTest(test_base.LoadBalancerBaseTest):
|
|||
listener = self.mem_listener_client.create_listener(
|
||||
**listener_kwargs)
|
||||
listener_id = listener[const.ID]
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener, listener_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
|
|
@ -64,7 +64,7 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
cls.lb_id = lb[const.ID]
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_lb_client.cleanup_loadbalancer,
|
||||
cls.lb_id)
|
||||
cls.lb_id, cascade=True)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -113,10 +113,6 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
const.CONNECTION_LIMIT: 200,
|
||||
}
|
||||
listener = cls.mem_listener_client.create_listener(**listener_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_listener_client.cleanup_listener,
|
||||
listener[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -132,10 +128,6 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
const.LISTENER_ID: listener[const.ID],
|
||||
}
|
||||
pool = cls.mem_pool_client.create_pool(**pool_kwargs)
|
||||
cls.addClassResourceCleanup(
|
||||
cls.mem_pool_client.cleanup_pool,
|
||||
pool[const.ID],
|
||||
lb_client=cls.mem_lb_client, lb_id=cls.lb_id)
|
||||
|
||||
waiters.wait_for_status(cls.mem_lb_client.show_loadbalancer,
|
||||
cls.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -716,10 +708,11 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
url_for_member1)
|
||||
|
||||
# Assert that slow traffic goes to pool2->member2
|
||||
# Increase timeout to cope with slow test systems.
|
||||
url_for_member2 = 'http://{}:{}/slow?delay=1s'.format(
|
||||
self.lb_vip_address, LISTENER_PORT)
|
||||
self.assertConsistentResponse((200, self.webserver2_response),
|
||||
url_for_member2)
|
||||
url_for_member2, timeout=3)
|
||||
|
||||
# Assert that /turtles is redirected to identity
|
||||
url_for_identity = 'http://{}:{}/turtles'.format(self.lb_vip_address,
|
||||
|
@ -1199,8 +1192,8 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
CONF.load_balancer.build_interval,
|
||||
CONF.load_balancer.build_timeout)
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
||||
pool_name = data_utils.rand_name("lb_member_pool3_cidrs")
|
||||
pool_kwargs = {
|
||||
|
@ -1233,8 +1226,8 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
CONF.load_balancer.build_interval,
|
||||
CONF.load_balancer.build_timeout)
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
||||
# Set up Member 1 for Webserver 1
|
||||
member1_name = data_utils.rand_name("lb_member_member1-cidrs-traffic")
|
||||
|
@ -1300,8 +1293,8 @@ class TrafficOperationsScenarioTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
const.ACTIVE,
|
||||
CONF.load_balancer.build_interval,
|
||||
CONF.load_balancer.build_timeout)
|
||||
CONF.load_balancer.check_interval,
|
||||
CONF.load_balancer.check_timeout)
|
||||
|
||||
# NOTE: Before we start with the consistent response check, we must
|
||||
# wait until Neutron completes the SG update.
|
||||
|
|
|
@ -74,7 +74,7 @@ class SparePoolTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
lb = self.mem_lb_client.create_loadbalancer(**lb_kwargs)
|
||||
self.lb_id = lb[const.ID]
|
||||
self.addClassResourceCleanup(self.mem_lb_client.cleanup_loadbalancer,
|
||||
self.lb_id)
|
||||
self.lb_id, cascade=True)
|
||||
|
||||
if CONF.validation.connect_method == 'floating':
|
||||
port_id = lb[const.VIP_PORT_ID]
|
||||
|
@ -114,10 +114,6 @@ class SparePoolTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
}
|
||||
listener = self.mem_listener_client.create_listener(**listener_kwargs)
|
||||
self.listener_id = listener[const.ID]
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_listener_client.cleanup_listener,
|
||||
self.listener_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -134,10 +130,6 @@ class SparePoolTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
}
|
||||
pool = self.mem_pool_client.create_pool(**pool_kwargs)
|
||||
self.pool_id = pool[const.ID]
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_pool_client.cleanup_pool,
|
||||
self.pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
|
||||
waiters.wait_for_status(self.mem_lb_client.show_loadbalancer,
|
||||
self.lb_id, const.PROVISIONING_STATUS,
|
||||
|
@ -157,12 +149,7 @@ class SparePoolTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
if self.lb_member_1_subnet:
|
||||
member1_kwargs[const.SUBNET_ID] = self.lb_member_1_subnet[const.ID]
|
||||
|
||||
member1 = self.mem_member_client.create_member(
|
||||
**member1_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member1[const.ID], pool_id=self.pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
self.mem_member_client.create_member(**member1_kwargs)
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
@ -181,12 +168,7 @@ class SparePoolTest(test_base.LoadBalancerBaseTestWithCompute):
|
|||
if self.lb_member_2_subnet:
|
||||
member2_kwargs[const.SUBNET_ID] = self.lb_member_2_subnet[const.ID]
|
||||
|
||||
member2 = self.mem_member_client.create_member(
|
||||
**member2_kwargs)
|
||||
self.addClassResourceCleanup(
|
||||
self.mem_member_client.cleanup_member,
|
||||
member2[const.ID], pool_id=self.pool_id,
|
||||
lb_client=self.mem_lb_client, lb_id=self.lb_id)
|
||||
self.mem_member_client.create_member(**member2_kwargs)
|
||||
waiters.wait_for_status(
|
||||
self.mem_lb_client.show_loadbalancer, self.lb_id,
|
||||
const.PROVISIONING_STATUS, const.ACTIVE,
|
||||
|
|
|
@ -78,6 +78,7 @@
|
|||
- ^etc/.*$
|
||||
- ^releasenotes/.*$
|
||||
vars:
|
||||
configure_swap_size: 8192
|
||||
devstack_localrc:
|
||||
TEMPEST_PLUGINS: /opt/stack/octavia-tempest-plugin
|
||||
USE_PYTHON3: true
|
||||
|
|
Loading…
Reference in New Issue