Update DVR Binding when router_id changes

In this patch I modified update_dvr_port_binding to update the
binding when router_id changes.

When a new router namespace is created on a host, sync_router
calls _ensure_host_set_on_port, which updates the DVR binding.
With the current code, update_dvr_port_binding won't touch an
existing binding unless its vif_type is VIF_TYPE_BINDING_FAILED.
This leaves a race when the router namespace is destroyed and then
recreated on the same host: the DVR binding is deleted only when
the update_device_down message is processed. If that message is
processed after the update_dvr_port_binding call triggered by the
namespace creation, update_dvr_port_binding won't refresh the
binding, and when the router interface is later detected as UP,
no DVR binding is found.

Closes-bug: #1358554
Change-Id: I37fd4ed67dc2019b57e36d082b584c517d8f67a8
rossella 2014-08-27 10:48:41 +00:00
parent f2c7ee7942
commit 78059968e2
2 changed files with 34 additions and 3 deletions
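
Before the diffs, a small standalone sketch of the decision this patch
introduces in update_dvr_port_binding (simplified from the hunk below;
the VIF_TYPE_BINDING_FAILED value and the dict-based binding are
illustration-only assumptions, not Neutron code):

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed value, for illustration


def dvr_binding_update_required(binding, attrs):
    """Return True when the DVR port binding must be created or refreshed.

    ``binding`` is the existing binding (a dict here, or None) and ``attrs``
    the incoming port attributes, mirroring the names in the diff below.
    """
    device_id = attrs and attrs.get('device_id')
    router_id = binding and binding.get('router_id')
    return (not binding or
            binding.get('vif_type') == VIF_TYPE_BINDING_FAILED or
            # New in this patch: refresh the binding when the router that
            # owns the port has changed (namespace destroyed and recreated).
            router_id != device_id)


# A successfully bound entry whose router_id is stale now triggers an
# update instead of being skipped.
stale = {'vif_type': 'ovs', 'router_id': 'old_router_id'}
print(dvr_binding_update_required(stale, {'device_id': 'new_router'}))  # True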


@@ -877,6 +877,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         self._update_port_dict_binding(port, binding)
         binding.host = attrs and attrs.get(portbindings.HOST_ID)
+        binding.router_id = attrs and attrs.get('device_id')

     def update_dvr_port_binding(self, context, id, port):
         attrs = port['port']
@@ -890,12 +891,16 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
         session = context.session
         binding = db.get_dvr_port_binding_by_host(session, id, host)
-        if (not binding or
-            binding.vif_type == portbindings.VIF_TYPE_BINDING_FAILED):
+        device_id = attrs and attrs.get('device_id')
+        router_id = binding and binding.get('router_id')
+        update_required = (not binding or
+            binding.vif_type == portbindings.VIF_TYPE_BINDING_FAILED or
+            router_id != device_id)
+        if update_required:
             with session.begin(subtransactions=True):
                 if not binding:
                     binding = db.ensure_dvr_port_binding(
-                        session, id, host, router_id=attrs['device_id'])
+                        session, id, host, router_id=device_id)
                 orig_port = super(Ml2Plugin, self).get_port(context, id)
                 network = self.get_network(context, orig_port['network_id'])
                 mech_context = driver_context.DvrPortContext(self,


@@ -34,6 +34,7 @@ from neutron.plugins.ml2 import db as ml2_db
 from neutron.plugins.ml2 import driver_api
 from neutron.plugins.ml2 import driver_context
 from neutron.plugins.ml2.drivers import type_vlan
+from neutron.plugins.ml2 import models
 from neutron.plugins.ml2 import plugin as ml2_plugin
 from neutron.tests.unit import _test_extension_portbindings as test_bindings
 from neutron.tests.unit.ml2.drivers import mechanism_logger as mech_logger
@@ -351,6 +352,31 @@ class TestMl2PortBinding(Ml2PluginV2TestCase,
         port = self._show('ports', port_id)['port']
         self._check_port_binding_profile(port, profile)

+    def test_process_dvr_port_binding_update_router_id(self):
+        host_id = 'host'
+        binding = models.DVRPortBinding(
+            port_id='port_id',
+            host=host_id,
+            router_id='old_router_id',
+            vif_type=portbindings.VIF_TYPE_OVS,
+            vnic_type=portbindings.VNIC_NORMAL,
+            cap_port_filter=False,
+            status=constants.PORT_STATUS_DOWN)
+        plugin = manager.NeutronManager.get_plugin()
+        mock_network = {'id': 'net_id'}
+        context = mock.Mock()
+        new_router_id = 'new_router'
+        attrs = {'device_id': new_router_id, portbindings.HOST_ID: host_id}
+        with mock.patch.object(plugin, '_update_port_dict_binding'):
+            with mock.patch.object(ml2_db, 'get_network_segments',
+                                   return_value=[]):
+                mech_context = driver_context.DvrPortContext(
+                    self, context, 'port', mock_network, binding)
+                plugin._process_dvr_port_binding(mech_context, context, attrs)
+                self.assertEqual(new_router_id,
+                                 mech_context._binding.router_id)
+                self.assertEqual(host_id, mech_context._binding.host)
+

 class TestMl2PortBindingNoSG(TestMl2PortBinding):
     HAS_PORT_FILTER = False