Add metadata proxy router_update callback handler
This patch implements the callback handler for router update events. The handler checks whether the proxy process monitor is active for the router and, if it is not, starts the proxy. This is particularly important when the metadata driver fails to receive a create notification due to failures; the subsequent resync step then generates an update event, which this handler uses to recover. Closes-bug: #1623732 Change-Id: I296a37daff1e5f018ae11eb8661c77ad346b8075
This commit is contained in:
parent
f396cf7dcd
commit
a60c2de881
@ -33,11 +33,15 @@ METADATA_SERVICE_NAME = 'metadata-proxy'
|
||||
|
||||
class MetadataDriver(object):
|
||||
|
||||
monitors = {}
|
||||
|
||||
def __init__(self, l3_agent):
    """Cache metadata settings and hook the router lifecycle events.

    :param l3_agent: the owning L3 agent; only its ``conf`` attribute is
        read here (``metadata_port`` and ``metadata_access_mark``).
    """
    conf = l3_agent.conf
    self.metadata_port = conf.metadata_port
    self.metadata_access_mark = conf.metadata_access_mark
    # Subscribe the module-level handlers to the router lifecycle.
    # AFTER_UPDATE is wired so a missed create notification can be
    # recovered from during resync (see after_router_updated).
    subscriptions = (
        (after_router_added, events.AFTER_CREATE),
        (after_router_updated, events.AFTER_UPDATE),
        (before_router_removed, events.BEFORE_DELETE),
    )
    for handler, event in subscriptions:
        registry.subscribe(handler, resources.ROUTER, event)
|
||||
|
||||
@ -122,6 +126,7 @@ class MetadataDriver(object):
|
||||
callback=callback)
|
||||
pm.enable()
|
||||
monitor.register(uuid, METADATA_SERVICE_NAME, pm)
|
||||
cls.monitors[router_id] = pm
|
||||
|
||||
@classmethod
|
||||
def destroy_monitored_metadata_proxy(cls, monitor, uuid, conf):
|
||||
@ -129,6 +134,7 @@ class MetadataDriver(object):
|
||||
# No need to pass ns name as it's not needed for disable()
|
||||
pm = cls._get_metadata_proxy_process_manager(uuid, conf)
|
||||
pm.disable()
|
||||
cls.monitors.pop(uuid, None)
|
||||
|
||||
@classmethod
|
||||
def _get_metadata_proxy_process_manager(cls, router_id, conf, ns_name=None,
|
||||
@ -161,6 +167,13 @@ def after_router_added(resource, event, l3_agent, **kwargs):
|
||||
router_id=router.router_id)
|
||||
|
||||
|
||||
def after_router_updated(resource, event, l3_agent, **kwargs):
    """Ensure the metadata proxy is running after a router update.

    If no process monitor is tracked for the updated router (for example
    because the create notification was missed and a resync produced this
    update event), delegate to the create handler so the proxy is spawned.
    """
    updated_router = kwargs['router']
    driver = l3_agent.metadata_driver
    monitor_active = bool(driver.monitors.get(updated_router.router_id))
    if not monitor_active:
        after_router_added(resource, event, l3_agent, **kwargs)
|
||||
|
||||
|
||||
def before_router_removed(resource, event, l3_agent, **kwargs):
|
||||
router = kwargs['router']
|
||||
proxy = l3_agent.metadata_driver
|
||||
|
@ -20,6 +20,7 @@ from oslo_utils import uuidutils
|
||||
from neutron.agent.common import config as agent_config
|
||||
from neutron.agent.l3 import agent as l3_agent
|
||||
from neutron.agent.l3 import ha as l3_ha_agent
|
||||
from neutron.agent.l3 import router_info
|
||||
from neutron.agent.metadata import config
|
||||
from neutron.agent.metadata import driver as metadata_driver
|
||||
from neutron.common import constants
|
||||
@ -79,6 +80,19 @@ class TestMetadataDriverProcess(base.BaseTestCase):
|
||||
cfg.CONF.register_opts(config.SHARED_OPTS)
|
||||
cfg.CONF.register_opts(config.DRIVER_OPTS)
|
||||
|
||||
def test_after_router_updated_called_on_agent_process_update(self):
    """Processing a router update must fire the after_router_updated hook."""
    patched_hook = mock.patch.object(metadata_driver, 'after_router_updated')
    patched_process = mock.patch.object(router_info.RouterInfo, 'process')
    with patched_hook as hook, patched_process:
        agent = l3_agent.L3NATAgent('localhost')
        router_id = _uuid()
        router = {'id': router_id}
        ri = router_info.RouterInfo(
            router_id, router, agent.conf, mock.ANY)
        agent.router_info[router_id] = ri
        agent._process_updated_router(router)
        hook.assert_called_once_with(
            'router', 'after_update', agent, router=ri)
|
||||
|
||||
def _test_spawn_metadata_proxy(self, expected_user, expected_group,
|
||||
user='', group='', watch_log=True):
|
||||
router_id = _uuid()
|
||||
|
Loading…
Reference in New Issue
Block a user