Capture port-deleted event associated with HM ServiceMonitor
When the port associated with a VM is deleted, and a ServiceMonitor row is associated with that port, an OVN SB event is triggered to signal that the ServiceMonitor entry has been deleted. That event allows us to update the status of the member/pool/LB accordingly. This patch subscribes the driver agent to DELETE events for the Service_Monitor table on the OVN SB DB in order to update the operating_status when the VM port is deleted.

Closes-Bug: #1989460
Change-Id: I9f8c5668c260a896f5c5848734854551c842db1e
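For readers unfamiliar with the mechanism, here is a minimal sketch of the ovsdbapp RowEvent pattern this patch builds on. The class and handler names below are illustrative, not the provider's actual ones: the event class subscribes to both UPDATE and DELETE on the SB Service_Monitor table and flags deletes to its handler so the member can be forced offline.

from ovsdbapp.backend.ovs_idl import event as row_event


class ServiceMonitorEventSketch(row_event.RowEvent):
    """Illustrative sketch, not the provider's real event class."""

    def __init__(self, handler):
        # 'handler' stands in for a callable shaped like
        # hm_update_event_handler(row, sm_delete_event=False).
        self.handler = handler
        events = (self.ROW_UPDATE, self.ROW_DELETE)
        super().__init__(events, 'Service_Monitor', None)
        self.event_name = 'ServiceMonitorEventSketch'

    def run(self, event, row, old):
        # A deleted Service_Monitor row still carries its last known status
        # (often 'online'), so the delete event itself is the signal that
        # the member port is gone.
        self.handler(row, sm_delete_event=(event == self.ROW_DELETE))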
@@ -103,3 +103,7 @@ LB_SELECTION_FIELDS_MAP = {
     constants.LB_ALGORITHM_SOURCE_IP: ["ip_src", "ip_dst"],
     None: ["ip_src", "ip_dst", "tp_src", "tp_dst"],
 }
+
+# HM events status
+HM_EVENT_MEMBER_PORT_ONLINE = ['online']
+HM_EVENT_MEMBER_PORT_OFFLINE = ['offline']
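A note on the list form of these constants (an inference, not stated in the patch): the status column of a Service_Monitor row is optional in the OVN SB schema, so the Python OVSDB IDL exposes it as a list of zero or one values; keeping the constants as single-element lists lets them be compared directly against row.status or info['status']. A tiny illustration:

# Illustration only: an IDL-style optional column is a 0/1-element list,
# so it compares directly against the single-element list constants above.
HM_EVENT_MEMBER_PORT_OFFLINE = ['offline']

row_status = ['offline']       # value as exposed by the IDL
print(row_status == HM_EVENT_MEMBER_PORT_OFFLINE)  # True
print([] == HM_EVENT_MEMBER_PORT_OFFLINE)          # False (status unset)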
@@ -72,7 +72,7 @@ class ServiceMonitorUpdateEvent(row_event.RowEvent):
 
     def __init__(self, driver):
         table = 'Service_Monitor'
-        events = (self.ROW_UPDATE,)
+        events = (self.ROW_UPDATE, self.ROW_DELETE)
         super().__init__(events, table, None)
         self.event_name = 'ServiceMonitorUpdateEvent'
         self.driver = driver
@@ -82,4 +82,7 @@ class ServiceMonitorUpdateEvent(row_event.RowEvent):
                   '%(event)s, %(row)s',
                   {'event': event,
                    'row': row})
-        self.driver.hm_update_event_handler(row)
+        if event == self.ROW_DELETE:
+            self.driver.hm_update_event_handler(row, sm_delete_event=True)
+        elif event == self.ROW_UPDATE:
+            self.driver.hm_update_event_handler(row)
@@ -2553,7 +2553,12 @@ class OvnProviderHelper():
             ('protocol', '=', row.protocol[0])).execute()
         return lbs if lbs else None
 
-    def hm_update_event_handler(self, row):
+    def hm_update_event_handler(self, row, sm_delete_event=False):
+        # NOTE(froyo): When a delete event is triggered, the Service_Monitor
+        # deleted row will include the last valid information, e.g. when the
+        # port is directly removed from the VM, the status will be 'online',
+        # in order to protect from this behaviour, we will set manually the
+        # status to 'offline' if sm_delete_event is reported as True.
         try:
             ovn_lb = self._get_lbs_on_hm_event(row)
         except idlutils.RowNotFound:
@@ -2564,10 +2569,14 @@ class OvnProviderHelper():
             LOG.debug("Load balancer not found")
             return
 
-        request_info = {'ovn_lb': ovn_lb,
-                        'ip': row.ip,
-                        'port': str(row.port),
-                        'status': row.status}
+        request_info = {
+            "ovn_lb": ovn_lb,
+            "ip": row.ip,
+            "port": str(row.port),
+            "status": row.status
+            if not sm_delete_event
+            else ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE,
+        }
         self.add_request({'type': ovn_const.REQ_TYPE_HM_UPDATE_EVENT,
                           'info': request_info})
 
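The conditional expression inside request_info is the heart of the fix: when sm_delete_event is set, the row's own (possibly stale) status is ignored and the offline constant is reported instead. A standalone sketch of just that behaviour, with a hypothetical helper name:

# Hypothetical helper mirroring the override performed in request_info:
# a deleted row may still report ['online'], so the delete flag wins.
HM_EVENT_MEMBER_PORT_OFFLINE = ['offline']


def event_status(row_status, sm_delete_event=False):
    return row_status if not sm_delete_event else HM_EVENT_MEMBER_PORT_OFFLINE


assert event_status(['online']) == ['online']
assert event_status(['online'], sm_delete_event=True) == ['offline']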
@@ -2713,7 +2722,7 @@ class OvnProviderHelper():
             LOG.warning('Member for event not found, info: %s', info)
         else:
             member_status = constants.ONLINE
-            if info['status'] == ['offline']:
+            if info['status'] == ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE:
                 member_status = constants.ERROR
 
             self._update_member_status(ovn_lb, member_id, member_status)
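Downstream, hm_update_event maps that event status onto the Octavia member operating_status, as the hunk above shows. A self-contained sketch of the mapping, reusing the new constant and the octavia_lib status constants (the function name here is hypothetical):

from octavia_lib.common import constants

HM_EVENT_MEMBER_PORT_OFFLINE = ['offline']


def member_operating_status(event_status):
    # Offline (including the forced-offline delete case) maps to ERROR,
    # anything else leaves the member ONLINE.
    if event_status == HM_EVENT_MEMBER_PORT_OFFLINE:
        return constants.ERROR
    return constants.ONLINE


print(member_operating_status(['offline']))  # ERROR
print(member_operating_status(['online']))   # ONLINE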
@@ -3691,14 +3691,42 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                    'src_ip': src_ip,
                    'port': self.member_port,
                    'protocol': self.ovn_hm_lb.protocol,
-                   'status': ['offline']})
+                   'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE})
         self.hm_update_event.run('update', row, mock.ANY)
         expected = {
             'info':
                 {'ovn_lb': [self.ovn_hm_lb],
                  'ip': self.member_address,
                  'port': self.member_port,
-                 'status': ['offline']},
+                 'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE},
             'type': 'hm_update_event'}
         self.mock_add_request.assert_called_once_with(expected)
         self.helper.ovn_nbdb_api.db_find_rows.assert_called_once_with(
+            'Load_Balancer',
+            ('ip_port_mappings', '=',
+             {self.member_address: 'a-logical-port:' + src_ip}),
+            ('protocol', '=', self.ovn_hm_lb.protocol[0]))
+
+    def test_hm_update_event_offline_by_delete(self):
+        self.helper.ovn_nbdb_api.db_find_rows.return_value.\
+            execute.return_value = [self.ovn_hm_lb]
+        self.hm_update_event = ovn_event.ServiceMonitorUpdateEvent(
+            self.helper)
+        src_ip = '10.22.33.4'
+        row = fakes.FakeOvsdbRow.create_one_ovsdb_row(
+            attrs={'ip': self.member_address,
+                   'logical_port': 'a-logical-port',
+                   'src_ip': src_ip,
+                   'port': self.member_port,
+                   'protocol': self.ovn_hm_lb.protocol,
+                   'status': ovn_const.HM_EVENT_MEMBER_PORT_ONLINE})
+        self.hm_update_event.run('delete', row, mock.ANY)
+        expected = {
+            'info':
+                {'ovn_lb': [self.ovn_hm_lb],
+                 'ip': self.member_address,
+                 'port': self.member_port,
+                 'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE},
+            'type': 'hm_update_event'}
+        self.mock_add_request.assert_called_once_with(expected)
+        self.helper.ovn_nbdb_api.db_find_rows.assert_called_once_with(
@@ -3718,7 +3746,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                    'src_ip': '10.22.33.4',
                    'port': self.member_port,
                    'protocol': self.ovn_hm_lb.protocol,
-                   'status': ['offline']})
+                   'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE})
         self.hm_update_event.run('update', row, mock.ANY)
         self.mock_add_request.assert_not_called()
 
@@ -3733,7 +3761,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                    'src_ip': '10.22.33.4',
                    'port': self.member_port,
                    'protocol': self.ovn_hm_lb.protocol,
-                   'status': ['offline']})
+                   'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE})
         self.hm_update_event.run('update', row, mock.ANY)
         self.mock_add_request.assert_not_called()
 
@@ -3768,7 +3796,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                 'src_ip': '10.22.33.4',
                 'port': port,
                 'protocol': self.ovn_hm_lb.protocol,
-                'status': ['offline']}
+                'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE}
 
         status = self.helper.hm_update_event(info)
         self.assertIsNone(status)
@@ -3890,7 +3918,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                 'src_ip': '10.22.33.4',
                 'port': '8080',
                 'protocol': self.ovn_hm_lb.protocol,
-                'status': ['offline']}
+                'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE}
         self._update_member_status(self.ovn_hm_lb, member['id'], 'offline')
         self._update_member_status(ovn_hm_lb_2, member_2['id'], 'offline')
         status = self.helper.hm_update_event(info)
@@ -4011,7 +4039,7 @@ class TestOvnProviderHelper(ovn_base.TestOvnOctaviaBase):
                 'src_ip': '10.22.33.4',
                 'port': '8081',
                 'protocol': ovn_hm_lb2.protocol,
-                'status': ['offline']}
+                'status': ovn_const.HM_EVENT_MEMBER_PORT_OFFLINE}
 
         status = self.helper.hm_update_event(info)
 