Switch Octavia API calls to openstacksdk
For a while we were using a dirty hack to call the Octavia API - i.e. we were mocking python-neutronclient to use a session pointing at the Octavia endpoint. This was a workaround that saved us the effort of introducing a python-octaviaclient dependency.

As Kuryr is strongly tied to the Kubernetes version it is serving, we should try to limit its dependency on OpenStack versions, e.g. we should be able to cooperate with various versions of Octavia. openstacksdk is designed to do exactly that: it adds an abstraction layer that hides the differences between APIs and, as a last resort, allows making bare HTTP calls, while taking care of all the Keystone stuff for us.

This commit removes the ugly workaround mentioned in the first paragraph and switches all the LBaaS v2 calls to use openstacksdk's Octavia proxy. Some LBaaS v2 leftovers are also cleaned up, e.g. the logic behind not using cascade deletion and the workaround for LBaaS v2 issues with pools.

Implements: blueprint switch-to-openstacksdk
Change-Id: Ic0bb56f90fe9effcdcb2ae5db96b8a1ec19738df
parent a0197124c0
commit b90ca1b9b9
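For context, a minimal sketch of the openstacksdk pattern this change adopts (assuming openstacksdk >= 0.13; the auth values below are illustrative placeholders, not part of the change):

    from keystoneauth1.identity import v3
    from keystoneauth1 import session as ks_session
    from openstack import connection

    # Build a Keystone session and hand it to openstacksdk; the Connection
    # resolves the 'load-balancer' (Octavia) endpoint from the catalog.
    auth = v3.Password(auth_url='http://keystone:5000/v3',  # placeholder
                       username='kuryr', password='secret',
                       project_name='service',
                       user_domain_name='Default',
                       project_domain_name='Default')
    conn = connection.Connection(session=ks_session.Session(auth=auth))

    # Listing calls on the load_balancer proxy return generators, which is
    # why the driver below uses the next()/StopIteration idiom.
    lb = next(conn.load_balancer.load_balancers(name='my-lb'), None)
    if lb is not None:
        # Cascade deletion removes the LB together with its listeners,
        # pools and members in one call.
        conn.load_balancer.delete_load_balancer(lb['id'], cascade=True)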
@@ -16,23 +16,27 @@
import os

from kuryr.lib import utils
from openstack import connection

from kuryr_kubernetes import config
from kuryr_kubernetes import k8s_client
from neutronclient import client as n_client

_clients = {}
_NEUTRON_CLIENT = 'neutron-client'
_LB_CLIENT = 'load-balancer-client'
_KUBERNETES_CLIENT = 'kubernetes-client'
_OPENSTACKSDK = 'openstacksdk'


def get_neutron_client():
    return _clients[_NEUTRON_CLIENT]


def get_openstacksdk():
    return _clients[_OPENSTACKSDK]


def get_loadbalancer_client():
    return _clients[_LB_CLIENT]
    return get_openstacksdk().load_balancer


def get_kubernetes_client():
@@ -41,34 +45,14 @@ def get_kubernetes_client():

def setup_clients():
    setup_neutron_client()
    setup_loadbalancer_client()
    setup_kubernetes_client()
    setup_openstacksdk()


def setup_neutron_client():
    _clients[_NEUTRON_CLIENT] = utils.get_neutron_client()


def setup_loadbalancer_client():
    neutron_client = get_neutron_client()
    if any(ext['alias'] == 'lbaasv2' for
           ext in neutron_client.list_extensions()['extensions']):
        _clients[_LB_CLIENT] = neutron_client
        neutron_client.cascading_capable = False
    else:
        # Since Octavia is lbaasv2 API compatible (A superset of it) we'll just
        # wire an extra neutron client instance to point to it
        lbaas_client = utils.get_neutron_client()
        conf_group = utils.kuryr_config.neutron_group.name
        auth_plugin = utils.get_auth_plugin(conf_group)
        octo_httpclient = n_client.construct_http_client(
            session=utils.get_keystone_session(conf_group, auth_plugin),
            service_type='load-balancer')
        lbaas_client.httpclient = octo_httpclient
        _clients[_LB_CLIENT] = lbaas_client
        lbaas_client.cascading_capable = True


def setup_kubernetes_client():
    if config.CONF.kubernetes.api_root:
        api_root = config.CONF.kubernetes.api_root
@@ -79,3 +63,10 @@ def setup_kubernetes_client():
        port = os.environ['KUBERNETES_SERVICE_PORT_HTTPS']
        api_root = "https://%s:%s" % (host, port)
    _clients[_KUBERNETES_CLIENT] = k8s_client.K8sClient(api_root)


def setup_openstacksdk():
    auth_plugin = utils.get_auth_plugin('neutron')
    session = utils.get_keystone_session('neutron', auth_plugin)
    conn = connection.Connection(session=session)
    _clients[_OPENSTACKSDK] = conn
@@ -14,14 +14,12 @@
# under the License.

import random
import six
import sys
import time
import traceback

import requests

from neutronclient.common import exceptions as n_exc
from openstack import exceptions as o_exc
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
@@ -53,11 +51,6 @@ _LB_STS_POLL_SLOW_INTERVAL = 3
class LBaaSv2Driver(base.LBaaSDriver):
    """LBaaSv2Driver implements LBaaSDriver for Neutron LBaaSv2 API."""

    @property
    def cascading_capable(self):
        lbaas = clients.get_loadbalancer_client()
        return lbaas.cascading_capable

    def get_service_loadbalancer_name(self, namespace, svc_name):
        return "%s/%s" % (namespace, svc_name)

@@ -84,17 +77,12 @@ class LBaaSv2Driver(base.LBaaSDriver):
    def release_loadbalancer(self, loadbalancer):
        neutron = clients.get_neutron_client()
        lbaas = clients.get_loadbalancer_client()
        if lbaas.cascading_capable:
            self._release(
                loadbalancer,
                loadbalancer,
                lbaas.delete,
                lbaas.lbaas_loadbalancer_path % loadbalancer.id,
                params={'cascade': True})

        else:
            self._release(loadbalancer, loadbalancer,
                          lbaas.delete_loadbalancer, loadbalancer.id)
        self._release(
            loadbalancer,
            loadbalancer,
            lbaas.delete_load_balancer,
            loadbalancer.id,
            cascade=True)

        sg_id = self._find_listeners_sg(loadbalancer)
        if sg_id:
@@ -394,7 +382,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
            result = self._ensure_provisioned(
                loadbalancer, listener, self._create_listener,
                self._find_listener, _LB_STS_POLL_SLOW_INTERVAL)
        except n_exc.BadRequest:
        except o_exc.BadRequestException:
            LOG.info("Listener creation failed, most probably because "
                     "protocol %(prot)s is not supported", {'prot': protocol})
            return None
@@ -449,9 +437,7 @@ class LBaaSv2Driver(base.LBaaSDriver):

    def release_pool(self, loadbalancer, pool):
        lbaas = clients.get_loadbalancer_client()
        self._release(loadbalancer, pool,
                      lbaas.delete_lbaas_pool,
                      pool.id)
        self._release(loadbalancer, pool, lbaas.delete_pool, pool.id)

    def ensure_member(self, loadbalancer, pool,
                      subnet_id, ip, port, target_ref_namespace,
@@ -480,9 +466,8 @@ class LBaaSv2Driver(base.LBaaSDriver):

    def release_member(self, loadbalancer, member):
        lbaas = clients.get_loadbalancer_client()
        self._release(loadbalancer, member,
                      lbaas.delete_lbaas_member,
                      member.id, member.pool_id)
        self._release(loadbalancer, member, lbaas.delete_member, member.id,
                      member.pool_id)

    def _get_vip_port(self, loadbalancer):
        neutron = clients.get_neutron_client()
@@ -502,71 +487,73 @@ class LBaaSv2Driver(base.LBaaSDriver):
    def _create_loadbalancer(self, loadbalancer):
        lbaas = clients.get_loadbalancer_client()

        request = {'loadbalancer': {
        request = {
            'name': loadbalancer.name,
            'project_id': loadbalancer.project_id,
            'vip_address': str(loadbalancer.ip),
            'vip_subnet_id': loadbalancer.subnet_id}}
            'vip_subnet_id': loadbalancer.subnet_id,
        }

        if loadbalancer.provider is not None:
            request['loadbalancer']['provider'] = loadbalancer.provider
            request['provider'] = loadbalancer.provider

        response = lbaas.create_loadbalancer(request)
        loadbalancer.id = response['loadbalancer']['id']
        response = lbaas.create_load_balancer(**request)
        loadbalancer.id = response['id']
        loadbalancer.port_id = self._get_vip_port(loadbalancer).get("id")
        if (loadbalancer.provider is not None and
                loadbalancer.provider != response['loadbalancer']['provider']):
                loadbalancer.provider != response['provider']):
            LOG.error("Request provider(%s) != Response provider(%s)",
                      loadbalancer.provider,
                      response['loadbalancer']['provider'])
                      response['provider'])
            return None
        loadbalancer.provider = response['loadbalancer']['provider']
        loadbalancer.provider = response['provider']
        return loadbalancer

    def _find_loadbalancer(self, loadbalancer):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_loadbalancers(
        response = lbaas.load_balancers(
            name=loadbalancer.name,
            project_id=loadbalancer.project_id,
            vip_address=str(loadbalancer.ip),
            vip_subnet_id=loadbalancer.subnet_id)

        try:
            loadbalancer.id = response['loadbalancers'][0]['id']
            os_lb = next(response)  # openstacksdk returns a generator
            loadbalancer.id = os_lb['id']
            loadbalancer.port_id = self._get_vip_port(loadbalancer).get("id")
            loadbalancer.provider = response['loadbalancers'][0]['provider']
            if (response['loadbalancers'][0]['provisioning_status'] ==
                    'ERROR'):
            loadbalancer.provider = os_lb['provider']
            if os_lb['provisioning_status'] == 'ERROR':
                self.release_loadbalancer(loadbalancer)
                return None
        except (KeyError, IndexError):
        except (KeyError, StopIteration):
            return None

        return loadbalancer

    def _create_listener(self, listener):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.create_listener({'listener': {
            'name': listener.name,
            'project_id': listener.project_id,
            'loadbalancer_id': listener.loadbalancer_id,
            'protocol': listener.protocol,
            'protocol_port': listener.port}})
        listener.id = response['listener']['id']
        response = lbaas.create_listener(
            name=listener.name,
            project_id=listener.project_id,
            load_balancer_id=listener.loadbalancer_id,
            protocol=listener.protocol,
            protocol_port=listener.port)
        listener.id = response['id']
        return listener

    def _find_listener(self, listener):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_listeners(
        response = lbaas.listeners(
            name=listener.name,
            project_id=listener.project_id,
            loadbalancer_id=listener.loadbalancer_id,
            load_balancer_id=listener.loadbalancer_id,
            protocol=listener.protocol,
            protocol_port=listener.port)

        try:
            listener.id = response['listeners'][0]['id']
        except (KeyError, IndexError):
            os_listener = next(response)
            listener.id = os_listener['id']
        except (KeyError, StopIteration):
            return None

        return listener
@@ -575,49 +562,19 @@ class LBaaSv2Driver(base.LBaaSDriver):
        # TODO(ivc): make lb_algorithm configurable
        lb_algorithm = 'ROUND_ROBIN'
        lbaas = clients.get_loadbalancer_client()
        try:
            response = lbaas.create_lbaas_pool({'pool': {
                'name': pool.name,
                'project_id': pool.project_id,
                'listener_id': pool.listener_id,
                'loadbalancer_id': pool.loadbalancer_id,
                'protocol': pool.protocol,
                'lb_algorithm': lb_algorithm}})
            pool.id = response['pool']['id']
            return pool
        except n_exc.StateInvalidClient:
            (type_, value, tb) = sys.exc_info()
            try:
                self._cleanup_bogus_pool(lbaas, pool, lb_algorithm)
            except Exception:
                LOG.error('Pool creation traceback: %s',
                          traceback.format_exception(type_, value, tb))
                raise
            else:
                six.reraise(type_, value, tb)

    def _cleanup_bogus_pool(self, lbaas, pool, lb_algorithm):
        # REVISIT(ivc): LBaaSv2 creates pool object despite raising an
        # exception. The created pool is not bound to listener, but
        # it is bound to loadbalancer and will cause an error on
        # 'release_loadbalancer'.
        pools = lbaas.list_lbaas_pools(
            name=pool.name, project_id=pool.project_id,
        response = lbaas.create_pool(
            name=pool.name,
            project_id=pool.project_id,
            listener_id=pool.listener_id,
            loadbalancer_id=pool.loadbalancer_id,
            protocol=pool.protocol, lb_algorithm=lb_algorithm)
        bogus_pool_ids = [p['id'] for p in pools.get('pools')
                          if not p['listeners'] and pool.name == p['name']]
        for pool_id in bogus_pool_ids:
            try:
                LOG.debug("Removing bogus pool %(id)s %(pool)s", {
                    'id': pool_id, 'pool': pool})
                lbaas.delete_lbaas_pool(pool_id)
            except (n_exc.NotFound, n_exc.StateInvalidClient):
                pass
            protocol=pool.protocol,
            lb_algorithm=lb_algorithm)
        pool.id = response['id']
        return pool

    def _find_pool(self, pool, by_listener=True):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_lbaas_pools(
        response = lbaas.pools(
            name=pool.name,
            project_id=pool.project_id,
            loadbalancer_id=pool.loadbalancer_id,
@@ -625,12 +582,10 @@ class LBaaSv2Driver(base.LBaaSDriver):

        try:
            if by_listener:
                pools = [p for p in response['pools']
                         if pool.listener_id
                pools = [p for p in response if pool.listener_id
                         in {l['id'] for l in p['listeners']}]
            else:
                pools = [p for p in response['pools']
                         if pool.name == p['name']]
                pools = [p for p in response if pool.name == p['name']]

            pool.id = pools[0]['id']
        except (KeyError, IndexError):
@@ -642,18 +597,19 @@ class LBaaSv2Driver(base.LBaaSDriver):

    def _create_member(self, member):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.create_lbaas_member(member.pool_id, {'member': {
            'name': member.name,
            'project_id': member.project_id,
            'subnet_id': member.subnet_id,
            'address': str(member.ip),
            'protocol_port': member.port}})
        member.id = response['member']['id']
        response = lbaas.create_member(
            member.pool_id,
            name=member.name,
            project_id=member.project_id,
            subnet_id=member.subnet_id,
            address=str(member.ip),
            protocol_port=member.port)
        member.id = response['id']
        return member

    def _find_member(self, member):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_lbaas_members(
        response = lbaas.members(
            member.pool_id,
            name=member.name,
            project_id=member.project_id,
@@ -662,8 +618,8 @@ class LBaaSv2Driver(base.LBaaSDriver):
            protocol_port=member.port)

        try:
            member.id = response['members'][0]['id']
        except (KeyError, IndexError):
            member.id = next(response)['id']
        except (KeyError, StopIteration):
            return None

        return member
@@ -673,7 +629,9 @@ class LBaaSv2Driver(base.LBaaSDriver):
        try:
            result = create(obj)
            LOG.debug("Created %(obj)s", {'obj': result})
        except (n_exc.Conflict, n_exc.InternalServerError):
        except o_exc.HttpException as e:
            if e.status_code not in (409, 500):
                raise
            result = find(obj)
            if result:
                LOG.debug("Found %(obj)s", {'obj': result})
@@ -688,7 +646,7 @@ class LBaaSv2Driver(base.LBaaSDriver):
                result = self._ensure(obj, create, find)
                if result:
                    return result
            except n_exc.StateInvalidClient:
            except o_exc.BadRequestException:
                continue

        raise k_exc.ResourceNotReady(obj)
@@ -699,9 +657,9 @@ class LBaaSv2Driver(base.LBaaSDriver):
            try:
                delete(*args, **kwargs)
                return
            except (n_exc.Conflict, n_exc.StateInvalidClient):
            except (o_exc.ConflictException, o_exc.BadRequestException):
                self._wait_for_provisioning(loadbalancer, remaining)
            except n_exc.NotFound:
            except o_exc.ResourceNotFound:
                return

        raise k_exc.ResourceNotReady(obj)
@@ -711,8 +669,8 @@ class LBaaSv2Driver(base.LBaaSDriver):
        lbaas = clients.get_loadbalancer_client()

        for remaining in self._provisioning_timer(timeout, interval):
            response = lbaas.show_loadbalancer(loadbalancer.id)
            status = response['loadbalancer']['provisioning_status']
            response = lbaas.get_load_balancer(loadbalancer.id)
            status = response['provisioning_status']
            if status == 'ACTIVE':
                LOG.debug("Provisioning complete for %(lb)s", {
                    'lb': loadbalancer})
@@ -731,8 +689,8 @@ class LBaaSv2Driver(base.LBaaSDriver):

        for remaining in self._provisioning_timer(timeout, interval):
            try:
                lbaas.show_loadbalancer(loadbalancer.id)
            except n_exc.NotFound:
                lbaas.get_load_balancer(loadbalancer.id)
            except o_exc.ResourceNotFound:
                return

    def _provisioning_timer(self, timeout,
@@ -777,20 +735,21 @@ class LBaaSv2Driver(base.LBaaSDriver):

    def get_lb_by_uuid(self, lb_uuid):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.show_loadbalancer(lb_uuid)
        try:
            return obj_lbaas.LBaaSLoadBalancer(
                id=response['loadbalancer']['id'],
                port_id=response['loadbalancer']['vip_port_id'],
                name=response['loadbalancer']['name'],
                project_id=response['loadbalancer']['project_id'],
                subnet_id=response['loadbalancer']['vip_subnet_id'],
                ip=response['loadbalancer']['vip_address'],
                security_groups=None,
                provider=response['loadbalancer']['provider'])
        except (KeyError, IndexError):
            response = lbaas.get_load_balancer(lb_uuid)
        except o_exc.ResourceNotFound:
            LOG.debug("Couldn't find loadbalancer with uuid=%s", lb_uuid)
            return None
            return None

        return obj_lbaas.LBaaSLoadBalancer(
            id=response['id'],
            port_id=response['vip_port_id'],
            name=response['name'],
            project_id=response['project_id'],
            subnet_id=response['vip_subnet_id'],
            ip=response['vip_address'],
            security_groups=None,
            provider=response['provider'])

    def get_pool_by_name(self, pool_name, project_id):
        lbaas = clients.get_loadbalancer_client()
@@ -799,9 +758,8 @@ class LBaaSv2Driver(base.LBaaSDriver):
        # get_loadbalancer_pool_name function, which means that pool's name
        # is unique

        pools_list = lbaas.list_lbaas_pools(
            project_id=project_id)
        for entry in pools_list['pools']:
        pools = lbaas.pools(project_id=project_id)
        for entry in pools:
            if not entry:
                continue
            if entry['name'] == pool_name:
@@ -830,30 +788,30 @@ class LBaaSv2Driver(base.LBaaSDriver):
    def release_l7_policy(self, loadbalancer, l7_policy):
        lbaas = clients.get_loadbalancer_client()
        self._release(
            loadbalancer, l7_policy, lbaas.delete_lbaas_l7policy,
            loadbalancer, l7_policy, lbaas.delete_l7_policy,
            l7_policy.id)

    def _create_l7_policy(self, l7_policy):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.create_lbaas_l7policy({'l7policy': {
            'action': _L7_POLICY_ACT_REDIRECT_TO_POOL,
            'listener_id': l7_policy.listener_id,
            'name': l7_policy.name,
            'project_id': l7_policy.project_id,
            'redirect_pool_id': l7_policy.redirect_pool_id}})
        l7_policy.id = response['l7policy']['id']
        response = lbaas.create_l7_policy(
            action=_L7_POLICY_ACT_REDIRECT_TO_POOL,
            listener_id=l7_policy.listener_id,
            name=l7_policy.name,
            project_id=l7_policy.project_id,
            redirect_pool_id=l7_policy.redirect_pool_id)
        l7_policy.id = response['id']
        return l7_policy

    def _find_l7_policy(self, l7_policy):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_lbaas_l7policies(
        response = lbaas.l7_policies(
            name=l7_policy.name,
            project_id=l7_policy.project_id,
            redirect_pool_id=l7_policy.redirect_pool_id,
            listener_id=l7_policy.listener_id)
        try:
            l7_policy.id = response['l7policies'][0]['id']
        except (KeyError, IndexError):
            l7_policy.id = next(response)['id']
        except (KeyError, StopIteration):
            return None
        return l7_policy

@@ -869,49 +827,47 @@ class LBaaSv2Driver(base.LBaaSDriver):

    def _create_l7_rule(self, l7_rule):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.create_lbaas_l7rule(
        response = lbaas.create_l7_rule(
            l7_rule.l7policy_id,
            {'rule': {'compare_type': l7_rule.compare_type,
                      'type': l7_rule.type,
                      'value': l7_rule.value}})
        l7_rule.id = response['rule']['id']
            compare_type=l7_rule.compare_type,
            type=l7_rule.type,
            value=l7_rule.value)
        l7_rule.id = response['id']
        return l7_rule

    def _find_l7_rule(self, l7_rule):
        lbaas = clients.get_loadbalancer_client()
        response = lbaas.list_lbaas_l7rules(
        response = lbaas.l7_rules(
            l7_rule.l7policy_id,
            type=l7_rule.type,
            value=l7_rule.value,
            compare_type=l7_rule.compare_type)
        try:
            l7_rule.id = response['rules'][0]['id']
        except (KeyError, IndexError):
            l7_rule.id = next(response)['id']
        except (KeyError, StopIteration):
            return None
        return l7_rule

    def release_l7_rule(self, loadbalancer, l7_rule):
        lbaas = clients.get_loadbalancer_client()
        self._release(
            loadbalancer, l7_rule, lbaas.delete_lbaas_l7rule,
            loadbalancer, l7_rule, lbaas.delete_l7_rule,
            l7_rule.id, l7_rule.l7policy_id)

    def update_l7_rule(self, l7_rule, new_value):
        lbaas = clients.get_loadbalancer_client()
        try:
            lbaas.update_lbaas_l7rule(
            lbaas.update_l7_rule(
                l7_rule.id, l7_rule.l7policy_id,
                {'rule': {'value': new_value}})

        except n_exc.NeutronClientException:
                value=new_value)
        except o_exc.SDKException:
            LOG.exception("Failed to update l7_rule- id=%s ", l7_rule.id)
            raise

    def is_pool_used_by_other_l7policies(self, l7policy, pool):
        lbaas = clients.get_loadbalancer_client()
        l7policy_list = lbaas.list_lbaas_l7policies(
            project_id=l7policy.project_id)
        for entry in l7policy_list['l7policies']:
        l7policy_list = lbaas.l7_policies(project_id=l7policy.project_id)
        for entry in l7policy_list:
            if not entry:
                continue
            if (entry['redirect_pool_id'] == pool.id and
@@ -250,16 +250,11 @@ class LoadBalancerHandler(k8s_base.ResourceEventHandler):
        if not lbaas_state:
            return
        # NOTE(ivc): deleting pool deletes its members
        if self._drv_lbaas.cascading_capable:
            self._drv_lbaas.release_loadbalancer(
                loadbalancer=lbaas_state.loadbalancer)
            if lbaas_state.service_pub_ip_info:
                self._drv_service_pub_ip.release_pub_ip(
                    lbaas_state.service_pub_ip_info)
        else:
            lbaas_state.members = []
            self._sync_lbaas_members(endpoints, lbaas_state,
                                     obj_lbaas.LBaaSServiceSpec())
        self._drv_lbaas.release_loadbalancer(
            loadbalancer=lbaas_state.loadbalancer)
        if lbaas_state.service_pub_ip_info:
            self._drv_service_pub_ip.release_pub_ip(
                lbaas_state.service_pub_ip_info)

    def _should_ignore(self, endpoints, lbaas_spec):
        return not(lbaas_spec and
@@ -16,6 +16,7 @@

import mock

from neutronclient.common import exceptions as n_exc
from openstack import exceptions as o_exc

from kuryr_kubernetes.controller.drivers import lbaasv2 as d_lbaasv2
from kuryr_kubernetes import exceptions as k_exc
@@ -68,24 +69,9 @@ class TestLBaaSv2Driver(test_base.TestCase):
            m_driver, name, project_id, subnet_id, ip,
            sg_ids, 'ClusterIP')

    def test_release_loadbalancer(self):
        self.useFixture(k_fix.MockNeutronClient()).client
        lbaas = self.useFixture(k_fix.MockLBaaSClient()).client
        lbaas.cascading_capable = False
        cls = d_lbaasv2.LBaaSv2Driver
        m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)
        loadbalancer = mock.Mock()

        cls.release_loadbalancer(m_driver, loadbalancer)

        m_driver._release.assert_called_once_with(loadbalancer, loadbalancer,
                                                  lbaas.delete_loadbalancer,
                                                  loadbalancer.id)

    def test_cascade_release_loadbalancer(self):
        self.useFixture(k_fix.MockNeutronClient()).client
        lbaas = self.useFixture(k_fix.MockLBaaSClient()).client
        lbaas.cascading_capable = True
        lbaas.lbaas_loadbalancer_path = "boo %s"
        cls = d_lbaasv2.LBaaSv2Driver
        m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)
@@ -94,9 +80,8 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls.release_loadbalancer(m_driver, loadbalancer)

        m_driver._release.assert_called_once_with(
            loadbalancer, loadbalancer, lbaas.delete,
            lbaas.lbaas_loadbalancer_path % loadbalancer.id,
            params={'cascade': True})
            loadbalancer, loadbalancer, lbaas.delete_load_balancer,
            loadbalancer.id, cascade=True)

    def _test_ensure_listener(self):
        cls = d_lbaasv2.LBaaSv2Driver
@@ -146,7 +131,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        loadbalancer = obj_lbaas.LBaaSLoadBalancer(
            id=loadbalancer_id, name=name, project_id=project_id,
            subnet_id=subnet_id, ip=ip, provider=provider)
        m_driver._ensure_provisioned.side_effect = n_exc.BadRequest
        m_driver._ensure_provisioned.side_effect = o_exc.BadRequestException

        resp = cls.ensure_listener(m_driver, loadbalancer,
                                   protocol, port)
@@ -205,7 +190,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls.release_pool(m_driver, loadbalancer, pool)

        m_driver._release.assert_called_once_with(loadbalancer, pool,
                                                  lbaas.delete_lbaas_pool,
                                                  lbaas.delete_pool,
                                                  pool.id)

    def test_ensure_member(self):
@@ -251,7 +236,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls.release_member(m_driver, loadbalancer, member)

        m_driver._release.assert_called_once_with(loadbalancer, member,
                                                  lbaas.delete_lbaas_member,
                                                  lbaas.delete_member,
                                                  member.id, member.pool_id)

    def test_create_loadbalancer(self):
@@ -263,18 +248,18 @@ class TestLBaaSv2Driver(test_base.TestCase):
            subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
            security_groups=[])
        loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
        req = {'loadbalancer': {
        req = {
            'name': loadbalancer.name,
            'project_id': loadbalancer.project_id,
            'vip_address': str(loadbalancer.ip),
            'vip_subnet_id': loadbalancer.subnet_id,
        }}
        resp = {'loadbalancer': {'id': loadbalancer_id, 'provider': 'haproxy'}}
        lbaas.create_loadbalancer.return_value = resp
        }
        resp = {'id': loadbalancer_id, 'provider': 'haproxy'}
        lbaas.create_load_balancer.return_value = resp
        m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}

        ret = cls._create_loadbalancer(m_driver, loadbalancer)
        lbaas.create_loadbalancer.assert_called_once_with(req)
        lbaas.create_load_balancer.assert_called_once_with(**req)
        for attr in loadbalancer.obj_fields:
            self.assertEqual(getattr(loadbalancer, attr),
                             getattr(ret, attr))
@@ -290,19 +275,19 @@ class TestLBaaSv2Driver(test_base.TestCase):
            security_groups=[],
            provider='amphora')
        loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
        req = {'loadbalancer': {
        req = {
            'name': loadbalancer.name,
            'project_id': loadbalancer.project_id,
            'vip_address': str(loadbalancer.ip),
            'vip_subnet_id': loadbalancer.subnet_id,
            'provider': loadbalancer.provider,
        }}
        resp = {'loadbalancer': {'id': loadbalancer_id, 'provider': 'amphora'}}
        lbaas.create_loadbalancer.return_value = resp
        }
        resp = {'id': loadbalancer_id, 'provider': 'amphora'}
        lbaas.create_load_balancer.return_value = resp
        m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}

        ret = cls._create_loadbalancer(m_driver, loadbalancer)
        lbaas.create_loadbalancer.assert_called_once_with(req)
        lbaas.create_load_balancer.assert_called_once_with(**req)
        for attr in loadbalancer.obj_fields:
            self.assertEqual(getattr(loadbalancer, attr),
                             getattr(ret, attr))
@@ -318,19 +303,19 @@ class TestLBaaSv2Driver(test_base.TestCase):
            security_groups=[],
            provider='amphora')
        loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
        req = {'loadbalancer': {
        req = {
            'name': loadbalancer.name,
            'project_id': loadbalancer.project_id,
            'vip_address': str(loadbalancer.ip),
            'vip_subnet_id': loadbalancer.subnet_id,
            'provider': loadbalancer.provider,
        }}
        resp = {'loadbalancer': {'id': loadbalancer_id, 'provider': 'haproxy'}}
        lbaas.create_loadbalancer.return_value = resp
        }
        resp = {'id': loadbalancer_id, 'provider': 'haproxy'}
        lbaas.create_load_balancer.return_value = resp
        m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}

        ret = cls._create_loadbalancer(m_driver, loadbalancer)
        lbaas.create_loadbalancer.assert_called_once_with(req)
        lbaas.create_load_balancer.assert_called_once_with(**req)
        self.assertIsNone(ret)

    def test_find_loadbalancer(self):
@@ -342,14 +327,14 @@ class TestLBaaSv2Driver(test_base.TestCase):
            subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
            provider='haproxy', security_groups=[])
        loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
        resp = {'loadbalancers': [{'id': loadbalancer_id,
                                   'provider': 'haproxy',
                                   'provisioning_status': 'ACTIVE'}]}
        lbaas.list_loadbalancers.return_value = resp
        resp = iter([{'id': loadbalancer_id,
                      'provider': 'haproxy',
                      'provisioning_status': 'ACTIVE'}])
        lbaas.load_balancers.return_value = resp
        m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}

        ret = cls._find_loadbalancer(m_driver, loadbalancer)
        lbaas.list_loadbalancers.assert_called_once_with(
        lbaas.load_balancers.assert_called_once_with(
            name=loadbalancer.name,
            project_id=loadbalancer.project_id,
            vip_address=str(loadbalancer.ip),
@@ -367,11 +352,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
        loadbalancer = obj_lbaas.LBaaSLoadBalancer(
            name='TEST_NAME', project_id='TEST_PROJECT', ip='1.2.3.4',
            subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1')
        resp = {'loadbalancers': []}
        lbaas.list_loadbalancers.return_value = resp
        resp = iter([])
        lbaas.load_balancers.return_value = resp

        ret = cls._find_loadbalancer(m_driver, loadbalancer)
        lbaas.list_loadbalancers.assert_called_once_with(
        lbaas.load_balancers.assert_called_once_with(
            name=loadbalancer.name,
            project_id=loadbalancer.project_id,
            vip_address=str(loadbalancer.ip),
@@ -387,14 +372,14 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', ip='1.2.3.4',
            subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1')
        loadbalancer_id = '00EE9E11-91C2-41CF-8FD4-7970579E5C4C'
        resp = {'loadbalancers': [{'id': loadbalancer_id,
                                   'provider': 'haproxy',
                                   'provisioning_status': 'ERROR'}]}
        lbaas.list_loadbalancers.return_value = resp
        resp = iter([{'id': loadbalancer_id,
                      'provider': 'haproxy',
                      'provisioning_status': 'ERROR'}])
        lbaas.load_balancers.return_value = resp
        m_driver._get_vip_port.return_value = {'id': mock.sentinel.port_id}

        ret = cls._find_loadbalancer(m_driver, loadbalancer)
        lbaas.list_loadbalancers.assert_called_once_with(
        lbaas.load_balancers.assert_called_once_with(
            name=loadbalancer.name,
            project_id=loadbalancer.project_id,
            vip_address=str(loadbalancer.ip),
@@ -410,17 +395,17 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
            port=1234, loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        listener_id = 'A57B7771-6050-4CA8-A63C-443493EC98AB'
        req = {'listener': {
        req = {
            'name': listener.name,
            'project_id': listener.project_id,
            'loadbalancer_id': listener.loadbalancer_id,
            'load_balancer_id': listener.loadbalancer_id,
            'protocol': listener.protocol,
            'protocol_port': listener.port}}
        resp = {'listener': {'id': listener_id}}
            'protocol_port': listener.port}
        resp = {'id': listener_id}
        lbaas.create_listener.return_value = resp

        ret = cls._create_listener(m_driver, listener)
        lbaas.create_listener.assert_called_once_with(req)
        lbaas.create_listener.assert_called_once_with(**req)
        for attr in listener.obj_fields:
            self.assertEqual(getattr(listener, attr),
                             getattr(ret, attr))
@@ -434,14 +419,13 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
            port=1234, loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        listener_id = 'A57B7771-6050-4CA8-A63C-443493EC98AB'
        resp = {'listeners': [{'id': listener_id}]}
        lbaas.list_listeners.return_value = resp
        lbaas.listeners.return_value = iter([{'id': listener_id}])

        ret = cls._find_listener(m_driver, listener)
        lbaas.list_listeners.assert_called_once_with(
        lbaas.listeners.assert_called_once_with(
            name=listener.name,
            project_id=listener.project_id,
            loadbalancer_id=listener.loadbalancer_id,
            load_balancer_id=listener.loadbalancer_id,
            protocol=listener.protocol,
            protocol_port=listener.port)
        for attr in listener.obj_fields:
@@ -456,14 +440,14 @@ class TestLBaaSv2Driver(test_base.TestCase):
        listener = obj_lbaas.LBaaSListener(
            name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
            port=1234, loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        resp = {'listeners': []}
        lbaas.list_listeners.return_value = resp
        resp = iter([])
        lbaas.listeners.return_value = resp

        ret = cls._find_listener(m_driver, listener)
        lbaas.list_listeners.assert_called_once_with(
        lbaas.listeners.assert_called_once_with(
            name=listener.name,
            project_id=listener.project_id,
            loadbalancer_id=listener.loadbalancer_id,
            load_balancer_id=listener.loadbalancer_id,
            protocol=listener.protocol,
            protocol_port=listener.port)
        self.assertIsNone(ret)
@@ -478,18 +462,18 @@ class TestLBaaSv2Driver(test_base.TestCase):
            listener_id='A57B7771-6050-4CA8-A63C-443493EC98AB',
            loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        pool_id = 'D4F35594-27EB-4F4C-930C-31DD40F53B77'
        req = {'pool': {
        req = {
            'name': pool.name,
            'project_id': pool.project_id,
            'listener_id': pool.listener_id,
            'loadbalancer_id': pool.loadbalancer_id,
            'protocol': pool.protocol,
            'lb_algorithm': lb_algorithm}}
        resp = {'pool': {'id': pool_id}}
        lbaas.create_lbaas_pool.return_value = resp
            'lb_algorithm': lb_algorithm}
        resp = {'id': pool_id}
        lbaas.create_pool.return_value = resp

        ret = cls._create_pool(m_driver, pool)
        lbaas.create_lbaas_pool.assert_called_once_with(req)
        lbaas.create_pool.assert_called_once_with(**req)
        for attr in pool.obj_fields:
            self.assertEqual(getattr(pool, attr),
                             getattr(ret, attr))
@@ -504,24 +488,18 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
            listener_id='A57B7771-6050-4CA8-A63C-443493EC98AB',
            loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        req = {'pool': {
        req = {
            'name': pool.name,
            'project_id': pool.project_id,
            'listener_id': pool.listener_id,
            'loadbalancer_id': pool.loadbalancer_id,
            'protocol': pool.protocol,
            'lb_algorithm': lb_algorithm}}
        lbaas.create_lbaas_pool.side_effect = n_exc.StateInvalidClient
            'lb_algorithm': lb_algorithm}
        lbaas.create_pool.side_effect = n_exc.StateInvalidClient

        self.assertRaises(n_exc.StateInvalidClient, cls._create_pool, m_driver,
                          pool)
        lbaas.create_lbaas_pool.assert_called_once_with(req)
        m_driver._cleanup_bogus_pool.assert_called_once_with(lbaas, pool,
                                                             lb_algorithm)

    def test_cleanup_bogus_pool(self):
        # TODO(ivc): add unit test or get rid of _cleanup_bogus_pool
        self.skipTest("not implemented")
        lbaas.create_pool.assert_called_once_with(**req)

    def test_find_pool_by_listener(self):
        lbaas = self.useFixture(k_fix.MockLBaaSClient()).client
@@ -532,12 +510,12 @@ class TestLBaaSv2Driver(test_base.TestCase):
            listener_id='A57B7771-6050-4CA8-A63C-443493EC98AB',
            loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        pool_id = 'D4F35594-27EB-4F4C-930C-31DD40F53B77'
        resp = {'pools': [{'id': pool_id,
                           'listeners': [{'id': pool.listener_id}]}]}
        lbaas.list_lbaas_pools.return_value = resp
        resp = [{'id': pool_id,
                 'listeners': [{'id': pool.listener_id}]}]
        lbaas.pools.return_value = resp

        ret = cls._find_pool(m_driver, pool)
        lbaas.list_lbaas_pools.assert_called_once_with(
        lbaas.pools.assert_called_once_with(
            name=pool.name,
            project_id=pool.project_id,
            loadbalancer_id=pool.loadbalancer_id,
@@ -555,11 +533,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', protocol='TCP',
            listener_id='A57B7771-6050-4CA8-A63C-443493EC98AB',
            loadbalancer_id='00EE9E11-91C2-41CF-8FD4-7970579E5C4C')
        resp = {'pools': []}
        lbaas.list_lbaas_pools.return_value = resp
        resp = []
        lbaas.pools.return_value = resp

        ret = cls._find_pool(m_driver, pool)
        lbaas.list_lbaas_pools.assert_called_once_with(
        lbaas.pools.assert_called_once_with(
            name=pool.name,
            project_id=pool.project_id,
            loadbalancer_id=pool.loadbalancer_id,
@@ -575,18 +553,18 @@ class TestLBaaSv2Driver(test_base.TestCase):
            port=1234, subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
            pool_id='D4F35594-27EB-4F4C-930C-31DD40F53B77')
        member_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
        req = {'member': {
        req = {
            'name': member.name,
            'project_id': member.project_id,
            'subnet_id': member.subnet_id,
            'address': str(member.ip),
            'protocol_port': member.port}}
        resp = {'member': {'id': member_id}}
        lbaas.create_lbaas_member.return_value = resp
            'protocol_port': member.port}
        resp = {'id': member_id}
        lbaas.create_member.return_value = resp

        ret = cls._create_member(m_driver, member)
        lbaas.create_lbaas_member.assert_called_once_with(
            member.pool_id, req)
        lbaas.create_member.assert_called_once_with(
            member.pool_id, **req)
        for attr in member.obj_fields:
            self.assertEqual(getattr(member, attr),
                             getattr(ret, attr))
@@ -601,11 +579,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            port=1234, subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
            pool_id='D4F35594-27EB-4F4C-930C-31DD40F53B77')
        member_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
        resp = {'members': [{'id': member_id}]}
        lbaas.list_lbaas_members.return_value = resp
        resp = iter([{'id': member_id}])
        lbaas.members.return_value = resp

        ret = cls._find_member(m_driver, member)
        lbaas.list_lbaas_members.assert_called_once_with(
        lbaas.members.assert_called_once_with(
            member.pool_id,
            name=member.name,
            project_id=member.project_id,
@@ -625,11 +603,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            name='TEST_NAME', project_id='TEST_PROJECT', ip='1.2.3.4',
            port=1234, subnet_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1',
            pool_id='D4F35594-27EB-4F4C-930C-31DD40F53B77')
        resp = {'members': []}
        lbaas.list_lbaas_members.return_value = resp
        resp = iter([])
        lbaas.members.return_value = resp

        ret = cls._find_member(m_driver, member)
        lbaas.list_lbaas_members.assert_called_once_with(
        lbaas.members.assert_called_once_with(
            member.pool_id,
            name=member.name,
            project_id=member.project_id,
@@ -667,10 +645,12 @@ class TestLBaaSv2Driver(test_base.TestCase):
        self.assertEqual(expected_result, ret)

    def test_ensure_with_conflict(self):
        self._verify_ensure_with_exception(n_exc.Conflict)
        self._verify_ensure_with_exception(
            o_exc.ConflictException(http_status=409))

    def test_ensure_with_internalservererror(self):
        self._verify_ensure_with_exception(n_exc.InternalServerError)
        self._verify_ensure_with_exception(
            o_exc.HttpException(http_status=500))

    def test_request(self):
        cls = d_lbaasv2.LBaaSv2Driver
@@ -682,7 +662,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        expected_result = mock.sentinel.expected_result
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        m_driver._ensure.side_effect = [n_exc.StateInvalidClient,
        m_driver._ensure.side_effect = [o_exc.BadRequestException,
                                        expected_result]

        ret = cls._ensure_provisioned(m_driver, loadbalancer, obj, create,
@@ -738,7 +718,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        m_delete = mock.Mock()
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        m_delete.side_effect = [n_exc.StateInvalidClient, None]
        m_delete.side_effect = [o_exc.BadRequestException, None]

        cls._release(m_driver, loadbalancer, obj, m_delete)

@@ -754,7 +734,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        m_delete = mock.Mock()
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        m_delete.side_effect = n_exc.NotFound
        m_delete.side_effect = o_exc.NotFoundException

        cls._release(m_driver, loadbalancer, obj, m_delete)

@@ -769,7 +749,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        m_delete = mock.Mock()
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        m_delete.side_effect = n_exc.StateInvalidClient
        m_delete.side_effect = o_exc.ConflictException

        self.assertRaises(k_exc.ResourceNotReady, cls._release, m_driver,
                          loadbalancer, obj, m_delete)
@@ -787,12 +767,12 @@ class TestLBaaSv2Driver(test_base.TestCase):
        timeout = mock.sentinel.timeout
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        resp = {'loadbalancer': {'provisioning_status': 'ACTIVE'}}
        lbaas.show_loadbalancer.return_value = resp
        resp = {'provisioning_status': 'ACTIVE'}
        lbaas.get_load_balancer.return_value = resp

        cls._wait_for_provisioning(m_driver, loadbalancer, timeout)

        lbaas.show_loadbalancer.assert_called_once_with(loadbalancer.id)
        lbaas.get_load_balancer.assert_called_once_with(loadbalancer.id)

    def test_wait_for_provisioning_not_ready(self):
        lbaas = self.useFixture(k_fix.MockLBaaSClient()).client
@@ -802,13 +782,13 @@ class TestLBaaSv2Driver(test_base.TestCase):
        timeout = mock.sentinel.timeout
        timer = [mock.sentinel.t0, mock.sentinel.t1]
        m_driver._provisioning_timer.return_value = timer
        resp = {'loadbalancer': {'provisioning_status': 'NOT_ACTIVE'}}
        lbaas.show_loadbalancer.return_value = resp
        resp = {'provisioning_status': 'NOT_ACTIVE'}
        lbaas.get_load_balancer.return_value = resp

        self.assertRaises(k_exc.ResourceNotReady, cls._wait_for_provisioning,
                          m_driver, loadbalancer, timeout)

        self.assertEqual(len(timer), lbaas.show_loadbalancer.call_count)
        self.assertEqual(len(timer), lbaas.get_load_balancer.call_count)

    def test_provisioning_timer(self):
        # REVISIT(ivc): add test if _provisioning_timer is to stay
@@ -820,7 +800,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)

        pools = {'name': 'KUKU', 'id': 'a2a62ea7-e3bf-40df-8c09-aa0c29876a6b'}
        lbaas.list_lbaas_pools.return_value = {'pools': [pools]}
        lbaas.pools.return_value = [pools]
        pool_name = 'NOT_KUKU'
        project_id = 'TEST_PROJECT'

@@ -859,25 +839,23 @@ class TestLBaaSv2Driver(test_base.TestCase):
            protocol=pool_protocol,
            id=pool_id)

        resp = {"pools": [
            {
                "protocol": pool_protocol,
                "loadbalancers": [
                    {
                        "id": pool_lb_id
                    }
                ],
                "listeners": resp_listeners,
                "project_id": pool_project_id,
                "id": pool_id,
                "name": pool_name
            }
        ]}
        resp = [{
            "protocol": pool_protocol,
            "loadbalancers": [
                {
                    "id": pool_lb_id
                }
            ],
            "listeners": resp_listeners,
            "project_id": pool_project_id,
            "id": pool_id,
            "name": pool_name
        }]

        lbaas.list_lbaas_pools.return_value = resp
        lbaas.pools.return_value = resp

        pool = cls.get_pool_by_name(m_driver, pool_name, pool_project_id)
        lbaas.list_lbaas_pools.assert_called_once()
        lbaas.pools.assert_called_once()
        for attr in expected_result.obj_fields:
            self.assertEqual(getattr(expected_result, attr),
                             getattr(pool, attr))
@@ -887,7 +865,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls = d_lbaasv2.LBaaSv2Driver
        m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)
        pools = {}
        lbaas.list_lbaas_pools.return_value = {'pools': [pools]}
        lbaas.pools.return_value = [pools]
        pool_name = 'NOT_KUKU'
        project_id = 'TEST_PROJECT'

@@ -913,18 +891,18 @@ class TestLBaaSv2Driver(test_base.TestCase):
            subnet_id=loadbalancer_subnet_id, ip=loadbalancer_vip,
            security_groups=None, provider=loadbalancer_provider)

        resp = {'loadbalancer': {'id': loadbalancer_id,
                                 'vip_port_id': loadbalancer_vip_port_id,
                                 'name': loadbalancer_name,
                                 'project_id': loadbalancer_project_id,
                                 'vip_subnet_id': loadbalancer_subnet_id,
                                 'vip_address': loadbalancer_vip,
                                 'provider': loadbalancer_provider}}
        resp = {'id': loadbalancer_id,
                'vip_port_id': loadbalancer_vip_port_id,
                'name': loadbalancer_name,
                'project_id': loadbalancer_project_id,
                'vip_subnet_id': loadbalancer_subnet_id,
                'vip_address': loadbalancer_vip,
                'provider': loadbalancer_provider}

        lbaas.show_loadbalancer.return_value = resp
        lbaas.get_load_balancer.return_value = resp

        ret = cls.get_lb_by_uuid(m_driver, loadbalancer_id)
        lbaas.show_loadbalancer.assert_called_once()
        lbaas.get_load_balancer.assert_called_once()
        for attr in expected_lb.obj_fields:
            self.assertEqual(getattr(expected_lb, attr),
                             getattr(ret, attr))
@@ -936,13 +914,13 @@ class TestLBaaSv2Driver(test_base.TestCase):
        m_driver = mock.Mock(spec=d_lbaasv2.LBaaSv2Driver)

        resp = {'loadbalancer': {}}
        lbaas.show_loadbalancer.return_value = resp
        lbaas.get_load_balancer.return_value = resp

        requested_uuid = '00EE9E11-91C2-41CF-8FD4-7970579EFFFF'
        lbaas.show_loadbalancer.return_value = resp
        lbaas.get_load_balancer.side_effect = o_exc.ResourceNotFound

        ret = cls.get_lb_by_uuid(m_driver, requested_uuid)
        lbaas.show_loadbalancer.assert_called_once()
        lbaas.get_load_balancer.assert_called_once()
        self.assertIsNone(ret)

    def test_ensure_l7policy(self):
@@ -987,7 +965,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls.release_l7_policy(m_driver, loadbalancer, l7_policy)

        m_driver._release.assert_called_once_with(
            loadbalancer, l7_policy, lbaas.delete_lbaas_l7policy,
            loadbalancer, l7_policy, lbaas.delete_l7_policy,
            l7_policy.id)

    def test_create_l7policy(self):
@@ -1002,17 +980,17 @@ class TestLBaaSv2Driver(test_base.TestCase):
            redirect_pool_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1')

        l7policy_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
        req = {'l7policy': {
        req = {
            'action': 'REDIRECT_TO_POOL',
            'listener_id': l7_policy.listener_id,
            'name': l7_policy.name,
            'project_id': l7_policy.project_id,
            'redirect_pool_id': l7_policy.redirect_pool_id}}
        resp = {'l7policy': {'id': l7policy_id}}
        lbaas.create_lbaas_l7policy.return_value = resp
            'redirect_pool_id': l7_policy.redirect_pool_id}
        resp = {'id': l7policy_id}
        lbaas.create_l7_policy.return_value = resp

        ret = cls._create_l7_policy(m_driver, l7_policy)
        lbaas.create_lbaas_l7policy.assert_called_once_with(req)
        lbaas.create_l7_policy.assert_called_once_with(**req)
        for attr in l7_policy.obj_fields:
            self.assertEqual(getattr(l7_policy, attr),
                             getattr(ret, attr))
@@ -1030,11 +1008,11 @@ class TestLBaaSv2Driver(test_base.TestCase):

        l7policy_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'

        resp = {'l7policies': [{'id': l7policy_id}]}
        lbaas.list_lbaas_l7policies.return_value = resp
        resp = iter([{'id': l7policy_id}])
        lbaas.l7_policies.return_value = resp

        ret = cls._find_l7_policy(m_driver, l7_policy)
        lbaas.list_lbaas_l7policies.assert_called_once_with(
        lbaas.l7_policies.assert_called_once_with(
            name=l7_policy.name,
            project_id=l7_policy.project_id,
            redirect_pool_id=l7_policy.redirect_pool_id,
@@ -1054,11 +1032,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            listener_id='D4F35594-27EB-4F4C-930C-31DD40F53B77',
            redirect_pool_id='D3FA400A-F543-4B91-9CD3-047AF0CE42D1')

        resp = {'l7policies': []}
        lbaas.list_lbaas_l7policies.return_value = resp
        resp = iter([])
        lbaas.l7_policies.return_value = resp

        ret = cls._find_l7_policy(m_driver, l7_policy)
        lbaas.list_lbaas_l7policies.assert_called_once_with(
        lbaas.l7_policies.assert_called_once_with(
            name=l7_policy.name,
            project_id=l7_policy.project_id,
            redirect_pool_id=l7_policy.redirect_pool_id,
@@ -1104,7 +1082,7 @@ class TestLBaaSv2Driver(test_base.TestCase):
        cls.release_l7_rule(m_driver, loadbalancer, l7_rule)

        m_driver._release.assert_called_once_with(
            loadbalancer, l7_rule, lbaas.delete_lbaas_l7rule,
            loadbalancer, l7_rule, lbaas.delete_l7_rule,
            l7_rule.id, l7_rule.l7policy_id)

    def test_create_l7_rule(self):
@@ -1120,17 +1098,16 @@ class TestLBaaSv2Driver(test_base.TestCase):

        l7_rule_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'

        req = {'rule': {
            'compare_type': l7_rule.compare_type,
            'type': l7_rule.type,
            'value': l7_rule.value}}
        req = {'compare_type': l7_rule.compare_type,
               'type': l7_rule.type,
               'value': l7_rule.value}

        resp = {'rule': {'id': l7_rule_id}}
        lbaas.create_lbaas_l7rule.return_value = resp
        resp = {'id': l7_rule_id}
        lbaas.create_l7_rule.return_value = resp

        ret = cls._create_l7_rule(m_driver, l7_rule)
        lbaas.create_lbaas_l7rule.assert_called_once_with(
            l7_rule.l7policy_id, req)
        lbaas.create_l7_rule.assert_called_once_with(l7_rule.l7policy_id,
                                                     **req)
        for attr in l7_rule.obj_fields:
            self.assertEqual(getattr(l7_rule, attr),
                             getattr(ret, attr))
@@ -1147,11 +1124,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            value='www.test.com')

        l7_rule_id = '3A70CEC0-392D-4BC1-A27C-06E63A0FD54F'
        resp = {'rules': [{'id': l7_rule_id}]}
        lbaas.list_lbaas_l7rules.return_value = resp
        resp = iter([{'id': l7_rule_id}])
        lbaas.l7_rules.return_value = resp

        ret = cls._find_l7_rule(m_driver, l7_rule)
        lbaas.list_lbaas_l7rules.assert_called_once_with(
        lbaas.l7_rules.assert_called_once_with(
            l7_rule.l7policy_id,
            type=l7_rule.type,
            value=l7_rule.value,
@@ -1172,11 +1149,11 @@ class TestLBaaSv2Driver(test_base.TestCase):
            type='HOST_NAME',
            value='www.test.com')

        resp = {'rules': []}
        lbaas.list_lbaas_l7rules.return_value = resp
        resp = iter([])
        lbaas.l7_rules.return_value = resp

        ret = cls._find_l7_rule(m_driver, l7_rule)
        lbaas.list_lbaas_l7rules.assert_called_once_with(
        lbaas.l7_rules.assert_called_once_with(
            l7_rule.l7policy_id,
            type=l7_rule.type,
            value=l7_rule.value,
@@ -608,25 +608,6 @@ class TestLoadBalancerHandler(test_base.TestCase):
        m_handler.on_deleted.assert_called_once_with(
            endpoints, lbaas_state)

    @mock.patch('kuryr_kubernetes.objects.lbaas'
                '.LBaaSServiceSpec')
    def test_on_deleted(self, m_svc_spec_ctor):
        endpoints = mock.sentinel.endpoints
        empty_spec = mock.sentinel.empty_spec
        lbaas_state = mock.sentinel.lbaas_state
        m_svc_spec_ctor.return_value = empty_spec

        m_handler = mock.Mock(spec=h_lbaas.LoadBalancerHandler)
        m_handler._get_lbaas_state.return_value = lbaas_state
        m_handler._drv_lbaas = mock.Mock()
        m_handler._drv_lbaas.cascading_capable = False

        h_lbaas.LoadBalancerHandler.on_deleted(m_handler, endpoints)

        m_handler._get_lbaas_state.assert_called_once_with(endpoints)
        m_handler._sync_lbaas_members.assert_called_once_with(
            endpoints, lbaas_state, empty_spec)

    @mock.patch('kuryr_kubernetes.objects.lbaas'
                '.LBaaSServiceSpec')
    def test_on_cascade_deleted_lb_service(self, m_svc_spec_ctor):
@@ -641,7 +622,6 @@ class TestLoadBalancerHandler(test_base.TestCase):
        m_handler._get_lbaas_state.return_value = lbaas_state
        m_handler._drv_lbaas = mock.Mock()
        m_handler._drv_service_pub_ip = mock.Mock()
        m_handler._drv_lbaas.cascading_capable = True

        h_lbaas.LoadBalancerHandler.on_deleted(m_handler, endpoints)

@@ -89,21 +89,25 @@ class TestVIFHandler(test_base.TestCase):
    @mock.patch.object(drivers.PodSecurityGroupsDriver, 'get_instance')
    @mock.patch.object(drivers.PodSubnetsDriver, 'get_instance')
    @mock.patch.object(drivers.PodProjectDriver, 'get_instance')
    def test_init(self, m_get_project_driver, m_get_subnets_driver,
                  m_get_sg_driver, m_get_vif_driver, m_get_vif_pool_driver,
                  m_set_vifs_driver, m_get_multi_vif_drivers):
    @mock.patch.object(drivers.LBaaSDriver, 'get_instance')
    def test_init(self, m_get_lbaas_driver, m_get_project_driver,
                  m_get_subnets_driver, m_get_sg_driver, m_get_vif_driver,
                  m_get_vif_pool_driver, m_set_vifs_driver,
                  m_get_multi_vif_drivers):
        project_driver = mock.sentinel.project_driver
        subnets_driver = mock.sentinel.subnets_driver
        sg_driver = mock.sentinel.sg_driver
        vif_driver = mock.sentinel.vif_driver
        vif_pool_driver = mock.Mock(spec=drivers.VIFPoolDriver)
        multi_vif_drivers = [mock.MagicMock(spec=drivers.MultiVIFDriver)]
        lbaas_driver = mock.sentinel.lbaas_driver
        m_get_project_driver.return_value = project_driver
        m_get_subnets_driver.return_value = subnets_driver
        m_get_sg_driver.return_value = sg_driver
        m_get_vif_driver.return_value = vif_driver
        m_get_vif_pool_driver.return_value = vif_pool_driver
        m_get_multi_vif_drivers.return_value = multi_vif_drivers
        m_get_lbaas_driver.return_value = lbaas_driver

        handler = h_vif.VIFHandler()

@@ -112,6 +116,7 @@ class TestVIFHandler(test_base.TestCase):
        self.assertEqual(sg_driver, handler._drv_sg)
        self.assertEqual(vif_pool_driver, handler._drv_vif_pool)
        self.assertEqual(multi_vif_drivers, handler._drv_multi_vif)
        self.assertEqual(lbaas_driver, handler._drv_lbaas)

    def test_is_pending_node(self):
        self.assertTrue(h_vif.VIFHandler._is_pending_node(self._pod))
@@ -21,60 +21,28 @@ from kuryr_kubernetes.tests import base as test_base

class TestK8sClient(test_base.TestCase):

    @mock.patch('openstack.connection.Connection')
    @mock.patch('kuryr_kubernetes.config.CONF')
    @mock.patch('kuryr_kubernetes.k8s_client.K8sClient')
    @mock.patch('kuryr.lib.utils.get_neutron_client')
    def test_setup_clients_lbaasv2(self, m_neutron, m_k8s, m_cfg):
    def test_setup_clients(self, m_neutron, m_k8s, m_cfg, m_openstack):
        k8s_api_root = 'http://127.0.0.1:1234'

        neutron_mock = mock.Mock()
        openstacksdk_mock = mock.Mock()
        openstacksdk_mock.load_balancer = mock.Mock()
        k8s_dummy = object()

        neutron_mock.list_extensions.return_value = {
            'extensions': [
                {'alias': 'lbaasv2',
                 'description': 'Provides Load Balancing',
                 'links': [],
                 'name': 'Load Balancing v2',
                 'updated': '2017-11-28T09:00:00-00:00'}]}

        m_cfg.kubernetes.api_root = k8s_api_root
        m_neutron.return_value = neutron_mock
        m_k8s.return_value = k8s_dummy
        m_openstack.return_value = openstacksdk_mock

        clients.setup_clients()

        m_k8s.assert_called_with(k8s_api_root)
        self.assertIs(k8s_dummy, clients.get_kubernetes_client())
        self.assertIs(neutron_mock, clients.get_neutron_client())
        self.assertIs(neutron_mock, clients.get_loadbalancer_client())

    @mock.patch('neutronclient.client.construct_http_client')
    @mock.patch('kuryr.lib.utils.get_auth_plugin')
    @mock.patch('kuryr_kubernetes.config.CONF')
    @mock.patch('kuryr_kubernetes.k8s_client.K8sClient')
    @mock.patch('kuryr.lib.utils.get_neutron_client')
    def test_setup_clients_octavia(self, m_neutron, m_k8s, m_cfg,
                                   m_auth_plugin, m_construct_http_client):
        k8s_api_root = 'http://127.0.0.1:1234'

        neutron_mock = mock.Mock()
        k8s_dummy = object()

        neutron_mock.list_extensions.return_value = {
            'extensions': []}

        octavia_httpclient = mock.sentinel.octavia_httpclient
        m_construct_http_client.return_value = octavia_httpclient
        m_auth_plugin.return_value = mock.sentinel.auth_plugin
        m_cfg.kubernetes.api_root = k8s_api_root
        m_neutron.return_value = neutron_mock
        m_k8s.return_value = k8s_dummy

        clients.setup_clients()

        m_k8s.assert_called_with(k8s_api_root)
        self.assertIs(k8s_dummy, clients.get_kubernetes_client())
        self.assertIs(neutron_mock, clients.get_neutron_client())
        self.assertIs(octavia_httpclient,
                      clients.get_loadbalancer_client().httpclient)
        self.assertIs(openstacksdk_mock, clients.get_openstacksdk())
        self.assertIs(openstacksdk_mock.load_balancer,
                      clients.get_loadbalancer_client())
@@ -55,7 +55,7 @@ netaddr==0.7.19
netifaces==0.10.6
neutron-lib==1.13.0
openstackdocstheme==1.18.1
openstacksdk==0.12.0
openstacksdk==0.13.0
os-client-config==1.29.0
os-service-types==1.2.0
os-testr==1.0.0
@@ -8,6 +8,7 @@ kuryr-lib>=0.5.0 # Apache-2.0
pbr!=2.1.0,>=2.0.0 # Apache-2.0
requests>=2.14.2 # Apache-2.0
eventlet!=0.18.3,!=0.20.1,!=0.21.0,>=0.18.2 # MIT
openstacksdk>=0.13.0 # Apache-2.0
oslo.cache>=1.26.0 # Apache-2.0
oslo.config>=5.2.0 # Apache-2.0
oslo.log>=3.36.0 # Apache-2.0