Ensure HM also applies to FIPs associated to LB VIPs

Currently, if a FIP gets associated to a LB with HealthMonitors
it is not included as a new OVN Load Balancer Health Check. This
means that if the VIP is used, traffic will not be redirected to
the dead members, but if the FIP is used there are no health checks
being applied and traffic will reach dead members.

This patch adds the extra functionality so that an extra OVN
Load Balancer Health Check is created for the FIPs associated to
the Load Balancer.

Closes-Bug: #1997418

Change-Id: Idbf1fb15076518092ce5fdaa57500d29342f51be
(cherry picked from commit ba4ea1134b)
This commit is contained in:
Luis Tomas Bolivar 2023-02-17 08:18:13 +01:00
parent 776e96bf8b
commit f95301ef51
2 changed files with 340 additions and 73 deletions

View File

@ -1547,6 +1547,8 @@ class OvnProviderHelper():
constants.LOADBALANCERS: [
{constants.ID: listener[constants.LOADBALANCER_ID],
constants.PROVISIONING_STATUS: constants.ACTIVE}]}
if ovn_lb.health_check:
self._update_lbhc_vip(ovn_lb, listener[constants.PROTOCOL_PORT])
return status
def pool_create(self, pool):
@ -2181,12 +2183,36 @@ class OvnProviderHelper():
commands.append(
self.ovn_nbdb_api.db_set('Load_Balancer', ovn_lb.uuid,
('external_ids', vip_fip_info)))
if ovn_lb.health_check:
kwargs = {
'vip': fip_info['vip_fip'],
'options': ovn_lb.health_check[0].options,
'external_ids': ovn_lb.health_check[0].external_ids}
with self.ovn_nbdb_api.transaction(check_error=True) as txn:
fip_lbhc = txn.add(
self.ovn_nbdb_api.db_create(
'Load_Balancer_Health_Check', **kwargs))
txn.add(self.ovn_nbdb_api.db_add(
'Load_Balancer', ovn_lb.uuid,
'health_check', fip_lbhc))
else:
external_ids.pop(ovn_const.LB_EXT_IDS_VIP_FIP_KEY)
commands.append(
self.ovn_nbdb_api.db_remove(
'Load_Balancer', ovn_lb.uuid, 'external_ids',
(ovn_const.LB_EXT_IDS_VIP_FIP_KEY)))
old_fip = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_VIP_FIP_KEY)
for lbhc in ovn_lb.health_check:
# FIPs can only be ipv4, so not dealing with ipv6 [] here
if lbhc.vip.split(":")[0] == old_fip:
commands.append(
self.ovn_nbdb_api.db_remove('Load_Balancer',
ovn_lb.uuid,
'health_check',
lbhc.uuid))
commands.append(self.ovn_nbdb_api.db_destroy(
'Load_Balancer_Health_Check', lbhc.uuid))
break
commands.extend(self._refresh_lb_vips(ovn_lb.uuid, external_ids))
self._execute_commands(commands)
@ -2305,6 +2331,7 @@ class OvnProviderHelper():
# Load_Balancer_Health_Check vip="${LB_VIP_ADDR}\:${MONITOR_PRT}")
# In our case the monitor port will be the members protocol port
vip = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_VIP_KEY)
fip = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_VIP_FIP_KEY)
if not vip:
LOG.error("Could not find VIP for HM %s, LB external_ids: %s",
hm_id, ovn_lb.external_ids)
@ -2313,8 +2340,12 @@ class OvnProviderHelper():
if not vip_port:
# This is not fatal as we can add it when a listener is created
vip = []
if fip:
fip = []
else:
vip = vip + ':' + vip_port
if fip:
fip = fip + ':' + vip_port
# ovn-nbctl --wait=sb --
# set Load_Balancer_Health_Check ${ID} options:\"interval\"=6 --
@ -2338,6 +2369,12 @@ class OvnProviderHelper():
'vip': vip,
'options': options,
'external_ids': external_ids}
if fip is not None:
fip_kwargs = {
'vip': fip,
'options': options,
'external_ids': external_ids}
operating_status = constants.ONLINE
if not info['admin_state_up']:
operating_status = constants.OFFLINE
@ -2354,6 +2391,14 @@ class OvnProviderHelper():
txn.add(self.ovn_nbdb_api.db_add(
'Load_Balancer', ovn_lb.uuid,
'health_check', health_check))
if fip is not None:
fip_health_check = txn.add(
self.ovn_nbdb_api.db_create(
'Load_Balancer_Health_Check',
**fip_kwargs))
txn.add(self.ovn_nbdb_api.db_add(
'Load_Balancer', ovn_lb.uuid,
'health_check', fip_health_check))
hms_key.append(hm_id)
txn.add(self.ovn_nbdb_api.db_set(
'Load_Balancer', ovn_lb.uuid,
@ -2368,23 +2413,59 @@ class OvnProviderHelper():
return status
def _update_lbhc_vip(self, ovn_lb, vip_port):
lbhc = self._lookup_lbhc_by_hm_id(ovn_lb.health_check)
if not lbhc:
LOG.error("Could not find HC with key: %s", ovn_lb.health_check)
return False
vip = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_VIP_KEY)
if not vip:
LOG.error("Could not find VIP for HC %s, LB external_ids: %s",
lbhc.uuid, ovn_lb.external_ids)
LOG.error("Could not find VIP for LB external_ids: %s",
ovn_lb.external_ids)
return False
vip_version = netaddr.IPAddress(vip).version
if vip_version == 6:
vip_lbhc = [lbhc for lbhc in ovn_lb.health_check
if lbhc.vip == [] or lbhc.vip[1:].split("]")[0] == vip]
else:
vip_lbhc = [lbhc for lbhc in ovn_lb.health_check
if lbhc.vip == [] or lbhc.vip.split(":")[0] == vip]
if not vip_lbhc:
LOG.error("Could not find HC associated to VIP: %s", vip)
return False
vip = vip + ':' + str(vip_port)
fip = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_VIP_FIP_KEY)
create_fip_lbhc = False
if fip:
fip = fip + ':' + str(vip_port)
if len(ovn_lb.health_check) != 2:
LOG.warning("There should be two HCs associated to the Load "
"Balancer %s as it has a FIP associated to it",
ovn_lb.uuid)
create_fip_lbhc = True
commands = []
commands.append(
self.ovn_nbdb_api.db_set(
'Load_Balancer_Health_Check', lbhc.uuid,
'Load_Balancer_Health_Check', ovn_lb.health_check[0].uuid,
('vip', vip)))
if fip:
if create_fip_lbhc:
# For upgrades purposes we need to recover from this situation
# and create the health_check for the FIP
kwargs = {
'vip': fip,
'options': vip_lbhc[0].options,
'external_ids': vip_lbhc[0].external_ids}
with self.ovn_nbdb_api.transaction(check_error=True) as txn:
fip_lbhc = txn.add(
self.ovn_nbdb_api.db_create(
'Load_Balancer_Health_Check', **kwargs))
txn.add(self.ovn_nbdb_api.db_add(
'Load_Balancer', ovn_lb.uuid,
'health_check', fip_lbhc))
else:
commands.append(
self.ovn_nbdb_api.db_set(
'Load_Balancer_Health_Check',
ovn_lb.health_check[1].uuid, ('vip', fip)))
self._execute_commands(commands)
return True
@ -2470,13 +2551,16 @@ class OvnProviderHelper():
self._execute_commands(commands)
return True
def _lookup_lbhc_by_hm_id(self, hm_id):
lbhcs = self.ovn_nbdb_api.db_list_rows(
def _lookup_lbhcs_by_hm_id(self, hm_id):
lbhc_rows = self.ovn_nbdb_api.db_list_rows(
'Load_Balancer_Health_Check').execute(check_error=True)
for lbhc in lbhcs:
lbhcs = []
for lbhc in lbhc_rows:
if (ovn_const.LB_EXT_IDS_HM_KEY in lbhc.external_ids and
lbhc.external_ids[ovn_const.LB_EXT_IDS_HM_KEY] == hm_id):
return lbhc
lbhcs.append(lbhc)
if lbhcs:
return lbhcs
raise idlutils.RowNotFound(table='Load_Balancer_Health_Check',
col='external_ids', match=hm_id)
@ -2491,12 +2575,12 @@ class OvnProviderHelper():
break
try:
lbhc = self._lookup_lbhc_by_hm_id(hm_id)
lbhcs = self._lookup_lbhcs_by_hm_id(hm_id)
except idlutils.RowNotFound:
LOG.debug("Loadbalancer health check %s not found!", hm_id)
return None, ovn_lb
return [], ovn_lb
return lbhc, ovn_lb
return lbhcs, ovn_lb
def hm_create(self, info):
status = {
@ -2579,8 +2663,8 @@ class OvnProviderHelper():
hm_id = info[constants.ID]
pool_id = info[constants.POOL_ID]
lbhc, ovn_lb = self._find_ovn_lb_from_hm_id(hm_id)
if not lbhc:
lbhcs, ovn_lb = self._find_ovn_lb_from_hm_id(hm_id)
if not lbhcs:
LOG.debug("Loadbalancer health check %s not found!", hm_id)
return status
if not ovn_lb:
@ -2602,10 +2686,11 @@ class OvnProviderHelper():
options['failure_count'] = str(info['failure_count'])
commands = []
commands.append(
self.ovn_nbdb_api.db_set(
'Load_Balancer_Health_Check', lbhc.uuid,
('options', options)))
for lbhc in lbhcs:
commands.append(
self.ovn_nbdb_api.db_set(
'Load_Balancer_Health_Check', lbhc.uuid,
('options', options)))
self._execute_commands(commands)
operating_status = constants.ONLINE
@ -2631,8 +2716,8 @@ class OvnProviderHelper():
{constants.ID: hm_id,
constants.OPERATING_STATUS: constants.NO_MONITOR,
constants.PROVISIONING_STATUS: constants.DELETED}]}
lbhc, ovn_lb = self._find_ovn_lb_from_hm_id(hm_id)
if not lbhc or not ovn_lb:
lbhcs, ovn_lb = self._find_ovn_lb_from_hm_id(hm_id)
if not lbhcs or not ovn_lb:
LOG.debug("Loadbalancer Health Check associated to Health Monitor "
"%s not found in OVN Northbound DB. Setting the "
"Loadbalancer Health Monitor status to DELETED in "
@ -2663,23 +2748,25 @@ class OvnProviderHelper():
hms_key = ovn_lb.external_ids.get(ovn_const.LB_EXT_IDS_HMS_KEY, [])
if hms_key:
hms_key = jsonutils.loads(hms_key)
if lbhc.uuid in hms_key:
hms_key.remove(lbhc.uuid)
if hm_id in hms_key:
hms_key.remove(hm_id)
commands = []
commands.append(
self.ovn_nbdb_api.db_clear('Load_Balancer', ovn_lb.uuid,
'ip_port_mappings'))
commands.append(
self.ovn_nbdb_api.db_remove('Load_Balancer', ovn_lb.uuid,
'health_check', lbhc.uuid))
for lbhc in lbhcs:
commands.append(
self.ovn_nbdb_api.db_remove('Load_Balancer', ovn_lb.uuid,
'health_check', lbhc.uuid))
commands.append(
self.ovn_nbdb_api.db_set(
'Load_Balancer', ovn_lb.uuid,
('external_ids', {ovn_const.LB_EXT_IDS_HMS_KEY:
jsonutils.dumps(hms_key)})))
commands.append(
self.ovn_nbdb_api.db_destroy('Load_Balancer_Health_Check',
lbhc.uuid))
for lbhc in lbhcs:
commands.append(
self.ovn_nbdb_api.db_destroy('Load_Balancer_Health_Check',
lbhc.uuid))
self._execute_commands(commands)
# Delete the hm port if not in use by other health monitors

View File

@ -2992,9 +2992,77 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
'_find_ovn_lbs')
def test_handle_vip_fip_disassociate(self, flb):
lb = mock.MagicMock()
vip_fip = '10.0.0.123'
external_ids = {
'neutron:vip': '172.26.21.20',
'neutron:vip_fip': vip_fip}
lb.external_ids = external_ids
lb_hc = mock.MagicMock()
lb_hc.uuid = "fake_lb_hc"
lb_hc.vip = "{}:80".format(vip_fip)
lb.health_check = [lb_hc]
fip_info = {
'action': 'disassociate',
'vip_fip': None,
'vip_fip': vip_fip,
'ovn_lb': lb}
flb.return_value = lb
self.helper.handle_vip_fip(fip_info)
calls = [
mock.call.db_remove(
'Load_Balancer', lb.uuid, 'external_ids', 'neutron:vip_fip'),
mock.call.db_remove(
'Load_Balancer', lb.uuid, 'health_check', lb_hc.uuid),
mock.call.db_destroy('Load_Balancer_Health_Check', lb_hc.uuid),
mock.call.db_clear('Load_Balancer', lb.uuid, 'vips'),
mock.call.db_set('Load_Balancer', lb.uuid, ('vips', {}))]
self.helper.ovn_nbdb_api.assert_has_calls(calls)
@mock.patch('ovn_octavia_provider.helper.OvnProviderHelper.'
'_find_ovn_lbs')
def test_handle_vip_fip_disassociate_no_lbhc(self, flb):
lb = mock.MagicMock()
vip_fip = '10.0.0.123'
external_ids = {
'neutron:vip': '172.26.21.20',
'neutron:vip_fip': vip_fip}
lb.external_ids = external_ids
lb.health_check = []
fip_info = {
'action': 'disassociate',
'vip_fip': vip_fip,
'ovn_lb': lb}
flb.return_value = lb
self.helper.handle_vip_fip(fip_info)
calls = [
mock.call.db_remove(
'Load_Balancer', lb.uuid, 'external_ids', 'neutron:vip_fip'),
mock.call.db_clear('Load_Balancer', lb.uuid, 'vips'),
mock.call.db_set('Load_Balancer', lb.uuid, ('vips', {}))]
self.helper.ovn_nbdb_api.assert_has_calls(calls)
@mock.patch('ovn_octavia_provider.helper.OvnProviderHelper.'
'_find_ovn_lbs')
def test_handle_vip_fip_disassociate_no_matching_lbhc(self, flb):
lb = mock.MagicMock()
vip_fip = '10.0.0.123'
external_ids = {
'neutron:vip': '172.26.21.20',
'neutron:vip_fip': vip_fip}
lb.external_ids = external_ids
lb_hc = mock.MagicMock()
lb_hc.uuid = "fake_lb_hc"
lb_hc.vip = "10.0.0.222:80"
lb.health_check = [lb_hc]
lb.health_check = []
fip_info = {
'action': 'disassociate',
'vip_fip': vip_fip,
'ovn_lb': lb}
flb.return_value = lb
self.helper.handle_vip_fip(fip_info)
@ -3026,16 +3094,61 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
fb.return_value = lb
self.helper.handle_vip_fip(fip_info)
calls = [
mock.call.db_set(
'Load_Balancer', lb.uuid,
('external_ids', {'neutron:vip_fip': '10.0.0.123'})),
mock.call.db_clear('Load_Balancer', lb.uuid, 'vips'),
mock.call.db_set(
'Load_Balancer', lb.uuid,
('vips', {'10.0.0.123:80': '192.168.2.149:1010',
'172.26.21.20:80': '192.168.2.149:1010'}))]
self.helper.ovn_nbdb_api.assert_has_calls(calls)
kwargs = {
'vip': fip_info['vip_fip'],
'options': lb.health_check[0].options,
'external_ids': lb.health_check[0].external_ids}
self.helper.ovn_nbdb_api.db_create.assert_called_once_with(
'Load_Balancer_Health_Check', **kwargs)
self.helper.ovn_nbdb_api.db_add.assert_called_once_with(
'Load_Balancer', lb.uuid, 'health_check', mock.ANY)
expected_db_set_calls = [
mock.call('Load_Balancer', lb.uuid,
('external_ids', {'neutron:vip_fip': '10.0.0.123'})),
mock.call('Load_Balancer', lb.uuid,
('vips', {'10.0.0.123:80': '192.168.2.149:1010',
'172.26.21.20:80': '192.168.2.149:1010'}))
]
self.helper.ovn_nbdb_api.db_set.assert_has_calls(expected_db_set_calls)
self.helper.ovn_nbdb_api.db_clear.assert_called_once_with(
'Load_Balancer', lb.uuid, 'vips')
@mock.patch('ovn_octavia_provider.helper.OvnProviderHelper.'
'_find_ovn_lbs')
def test_handle_vip_fip_associate_no_lbhc(self, fb):
lb = mock.MagicMock()
fip_info = {
'action': 'associate',
'vip_fip': '10.0.0.123',
'ovn_lb': lb}
members = 'member_%s_%s:%s_%s' % (self.member_id,
self.member_address,
self.member_port,
self.member_subnet_id)
external_ids = {
'listener_foo': '80:pool_%s' % self.pool_id,
'pool_%s' % self.pool_id: members,
'neutron:vip': '172.26.21.20'}
lb.external_ids = external_ids
lb.health_check = []
fb.return_value = lb
self.helper.handle_vip_fip(fip_info)
self.helper.ovn_nbdb_api.db_create.assert_not_called()
self.helper.ovn_nbdb_api.db_add.assert_not_called()
expected_db_set_calls = [
mock.call('Load_Balancer', lb.uuid,
('external_ids', {'neutron:vip_fip': '10.0.0.123'})),
mock.call('Load_Balancer', lb.uuid,
('vips', {'10.0.0.123:80': '192.168.2.149:1010',
'172.26.21.20:80': '192.168.2.149:1010'}))
]
self.helper.ovn_nbdb_api.db_set.assert_has_calls(expected_db_set_calls)
self.helper.ovn_nbdb_api.db_clear.assert_called_once_with(
'Load_Balancer', lb.uuid, 'vips')
@mock.patch('ovn_octavia_provider.common.clients.get_neutron_client')
def test_handle_member_dvr_lb_has_no_fip(self, net_cli):
@ -3383,7 +3496,8 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch('ovn_octavia_provider.common.clients.get_neutron_client')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_update_hm_members')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_by_pool_id')
def _test_hm_create(self, protocol, members, folbpi, uhm, net_cli):
def _test_hm_create(self, protocol, members, fip, folbpi, uhm,
net_cli):
self._get_pool_listeners.stop()
fake_subnet = fakes.FakeSubnet.create_one_subnet()
pool_key = 'pool_%s' % self.pool_id
@ -3391,6 +3505,8 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
folbpi.return_value = (pool_key, self.ovn_hm_lb)
uhm.return_value = True
net_cli.return_value.show_subnet.return_value = {'subnet': fake_subnet}
if not fip:
del self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_FIP_KEY]
status = self.helper.hm_create(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
constants.ACTIVE)
@ -3411,6 +3527,10 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
constants.ONLINE)
vip = (self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_KEY] +
':' + str(self.listener['protocol_port']))
if fip:
fip = (self.ovn_hm_lb.external_ids[
ovn_const.LB_EXT_IDS_VIP_FIP_KEY] +
':' + str(self.listener['protocol_port']))
options = {'interval': '6',
'timeout': '7',
'failure_count': '5',
@ -3419,21 +3539,39 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
kwargs = {'vip': vip,
'options': options,
'external_ids': external_ids}
self.helper.ovn_nbdb_api.db_create.assert_called_once_with(
'Load_Balancer_Health_Check', **kwargs)
self.helper.ovn_nbdb_api.db_add.assert_called_once_with(
'Load_Balancer', self.ovn_hm_lb.uuid, 'health_check', mock.ANY)
if fip:
fip_kwargs = {'vip': fip,
'options': options,
'external_ids': external_ids}
expected_lbhc_calls = [
mock.call('Load_Balancer_Health_Check', **kwargs)]
if fip:
expected_lbhc_calls.append(
mock.call('Load_Balancer_Health_Check', **fip_kwargs)
)
self.helper.ovn_nbdb_api.db_create.assert_has_calls(
expected_lbhc_calls)
if fip:
self.assertEqual(self.helper.ovn_nbdb_api.db_add.call_count, 2)
else:
self.helper.ovn_nbdb_api.db_add.assert_called_once_with(
'Load_Balancer', self.ovn_hm_lb.uuid, 'health_check', mock.ANY)
def test_hm_create_tcp(self):
self._test_hm_create('tcp', False)
self._test_hm_create('tcp', False, True)
def test_hm_create_tcp_no_fip(self):
self._test_hm_create('tcp', False, False)
def test_hm_create_udp(self):
self._test_hm_create('udp', False)
self._test_hm_create('udp', False, True)
def test_hm_create_tcp_pool_members(self):
pool_key = 'pool_%s' % self.pool_id
self.ovn_hm_lb.external_ids[pool_key] = self.member_line
self._test_hm_create('tcp', True)
self._test_hm_create('tcp', True, True)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_by_pool_id')
def test_hm_create_no_vip_port(self, folbpi):
@ -3456,10 +3594,11 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
kwargs = {'vip': vip,
'options': options,
'external_ids': external_ids}
self.helper.ovn_nbdb_api.db_create.assert_called_once_with(
'Load_Balancer_Health_Check', **kwargs)
self.helper.ovn_nbdb_api.db_add.assert_called_once_with(
'Load_Balancer', self.ovn_hm_lb.uuid, 'health_check', mock.ANY)
expected_lbhc_calls = [
mock.call('Load_Balancer_Health_Check', **kwargs),
mock.call('Load_Balancer_Health_Check', **kwargs)]
self.helper.ovn_nbdb_api.db_create.has_calls(expected_lbhc_calls)
self.assertEqual(self.helper.ovn_nbdb_api.db_add.call_count, 2)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_by_pool_id')
def test_hm_create_offline(self, folbpi):
@ -3603,29 +3742,61 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
self.assertEqual(status['healthmonitors'][0]['operating_status'],
constants.ERROR)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_lookup_lbhc_by_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_get_or_create_ovn_lb')
def test_hm_create_then_listener_create(self, get_ovn_lb, lookup_hm):
get_ovn_lb.return_value = self.ovn_hm_lb
lookup_hm.return_value = self.ovn_hm
self.ovn_hm_lb.health_check = self.ovn_hm
self.listener['admin_state_up'] = True
status = self.helper.listener_create(self.listener)
def test_hm_create_then_listener_create(self, get_ovn_lb):
vip = (self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_KEY] +
':' + str(self.listener['protocol_port']))
fip = (self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_FIP_KEY] +
':' + str(self.listener['protocol_port']))
self.ovn_hm.vip = []
self.ovn_hm_lb.health_check = [self.ovn_hm]
get_ovn_lb.return_value = self.ovn_hm_lb
self.listener['admin_state_up'] = True
kwargs = {
'vip': fip,
'options': self.ovn_hm.options,
'external_ids': self.ovn_hm.external_ids}
status = self.helper.listener_create(self.listener)
self.helper.ovn_nbdb_api.db_set.assert_called_with(
'Load_Balancer_Health_Check', self.ovn_hm.uuid, ('vip', vip))
self.helper.ovn_nbdb_api.db_create.assert_called_with(
'Load_Balancer_Health_Check', **kwargs)
self.helper.ovn_nbdb_api.db_add.assert_called_with(
'Load_Balancer', self.ovn_hm_lb.uuid, 'health_check', mock.ANY)
self.assertEqual(status['listeners'][0]['provisioning_status'],
constants.ACTIVE)
self.assertEqual(status['listeners'][0]['operating_status'],
constants.ONLINE)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_lookup_lbhc_by_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_get_or_create_ovn_lb')
def test_hm_create_then_listener_create_no_fip(self, get_ovn_lb):
vip = (self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_KEY] +
':' + str(self.listener['protocol_port']))
self.ovn_hm.vip = []
self.ovn_hm_lb.health_check = [self.ovn_hm]
get_ovn_lb.return_value = self.ovn_hm_lb
self.listener['admin_state_up'] = True
del self.ovn_hm_lb.external_ids[ovn_const.LB_EXT_IDS_VIP_FIP_KEY]
status = self.helper.listener_create(self.listener)
self.helper.ovn_nbdb_api.db_set.assert_called_with(
'Load_Balancer_Health_Check', self.ovn_hm.uuid, ('vip', vip))
self.helper.ovn_nbdb_api.db_create.assert_not_called()
self.helper.ovn_nbdb_api.db_add.assert_not_called()
self.assertEqual(status['listeners'][0]['provisioning_status'],
constants.ACTIVE)
self.assertEqual(status['listeners'][0]['operating_status'],
constants.ONLINE)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_lookup_lbhcs_by_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_get_or_create_ovn_lb')
def test_hm_create_then_listener_create_no_hm(self, get_ovn_lb, lookup_hm):
get_ovn_lb.return_value = self.ovn_hm_lb
lookup_hm.return_value = None
self.ovn_hm_lb.health_check = self.ovn_hm
lookup_hm.return_value = []
self.ovn_hm_lb.health_check = [self.ovn_hm]
self.listener['admin_state_up'] = True
status = self.helper.listener_create(self.listener)
self.assertEqual(status['listeners'][0]['provisioning_status'],
@ -3634,13 +3805,13 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
constants.ERROR)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_refresh_lb_vips')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_lookup_lbhc_by_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_lookup_lbhcs_by_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_get_or_create_ovn_lb')
def test_hm_create_then_listener_create_no_vip(self, get_ovn_lb,
lookup_hm, refresh_vips):
get_ovn_lb.return_value = self.ovn_hm_lb
lookup_hm.return_value = self.ovn_hm
self.ovn_hm_lb.health_check = self.ovn_hm
lookup_hm.return_value = [self.ovn_hm]
self.ovn_hm_lb.health_check = [self.ovn_hm]
self.ovn_hm_lb.external_ids.pop(ovn_const.LB_EXT_IDS_VIP_KEY)
self.listener['admin_state_up'] = True
status = self.helper.listener_create(self.listener)
@ -3649,9 +3820,18 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
self.assertEqual(status['listeners'][0]['operating_status'],
constants.ERROR)
@mock.patch.object(ovn_helper.OvnProviderHelper, '_update_lbhc_vip')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lbs')
def test_hm_create_then_listener_update(self, find_ovn_lbs,
update_lbhc_vip):
find_ovn_lbs.return_value = self.ovn_hm_lb
self.helper.listener_update(self.listener)
update_lbhc_vip.assert_called_once_with(
self.ovn_hm_lb, self.listener[constants.PROTOCOL_PORT])
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
def test_hm_update(self, folbfhi):
folbfhi.return_value = (self.ovn_hm, self.ovn_hm_lb)
folbfhi.return_value = ([self.ovn_hm], self.ovn_hm_lb)
status = self.helper.hm_update(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
constants.ACTIVE)
@ -3660,7 +3840,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
def test_hm_update_no_admin_state_up(self, folbfhi):
folbfhi.return_value = (self.ovn_hm, self.ovn_hm_lb)
folbfhi.return_value = ([self.ovn_hm], self.ovn_hm_lb)
self.ovn_hm_lb.pop('admin_state_up')
status = self.helper.hm_update(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
@ -3670,7 +3850,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
def test_hm_update_offline(self, folbfhi):
folbfhi.return_value = (self.ovn_hm, self.ovn_hm_lb)
folbfhi.return_value = ([self.ovn_hm], self.ovn_hm_lb)
self.health_monitor['admin_state_up'] = False
status = self.helper.hm_update(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
@ -3680,7 +3860,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
def test_hm_update_hm_not_found(self, folbfhi):
folbfhi.return_value = (None, None)
folbfhi.return_value = ([], None)
status = self.helper.hm_update(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
constants.ERROR)
@ -3690,7 +3870,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_by_pool_id')
def test_hm_update_lb_not_found(self, folbpi, folbfhi):
folbfhi.return_value = (self.ovn_hm, None)
folbfhi.return_value = ([self.ovn_hm], None)
folbpi.return_value = (None, None)
status = self.helper.hm_update(self.health_monitor)
self.assertEqual(status['healthmonitors'][0]['provisioning_status'],
@ -3700,7 +3880,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
@mock.patch.object(ovn_helper.OvnProviderHelper, '_find_ovn_lb_from_hm_id')
def test_hm_update_just_interval(self, folbfhi):
folbfhi.return_value = (self.ovn_hm, self.ovn_hm_lb)
folbfhi.return_value = ([self.ovn_hm], self.ovn_hm_lb)
self.health_monitor['interval'] = 3
self.helper.hm_update(self.health_monitor)
options = {