Merge "Add unit tests for ML2 DVR port binding and fix PortContext inconsistencies"

This commit is contained in:
Jenkins 2015-05-19 21:38:42 +00:00 committed by Gerrit Code Review
commit a8f4358e3d
8 changed files with 308 additions and 29 deletions

View File

@@ -377,15 +377,83 @@ class PortContext(object):
@abc.abstractproperty @abc.abstractproperty
def host(self): def host(self):
"""Return the host associated with the 'current' port.""" """Return the host with which the port is associated.
In the context of a host-specific operation on a distributed
port, the host property indicates the host for which the port
operation is being performed. Otherwise, it is the same value
as current['binding:host_id'].
"""
pass pass
@abc.abstractproperty @abc.abstractproperty
def original_host(self): def original_host(self):
"""Return the host associated with the 'original' port. """Return the original host with which the port was associated.
Method is only valid within calls to update_port_precommit In the context of a host-specific operation on a distributed
and update_port_postcommit. port, the original_host property indicates the host for which
the port operation is being performed. Otherwise, it is the
same value as original['binding:host_id'].
This property is only valid within calls to
update_port_precommit and update_port_postcommit. It returns
None otherwise.
"""
pass
@abc.abstractproperty
def vif_type(self):
"""Return the vif_type indicating the binding state of the port.
In the context of a host-specific operation on a distributed
port, the vif_type property indicates the binding state for
the host for which the port operation is being
performed. Otherwise, it is the same value as
current['binding:vif_type'].
"""
pass
@abc.abstractproperty
def original_vif_type(self):
"""Return the original vif_type of the port.
In the context of a host-specific operation on a distributed
port, the original_vif_type property indicates the original
binding state for the host for which the port operation is
being performed. Otherwise, it is the same value as
original['binding:vif_type'].
This property is only valid within calls to
update_port_precommit and update_port_postcommit. It returns
None otherwise.
"""
pass
@abc.abstractproperty
def vif_details(self):
"""Return the vif_details describing the binding of the port.
In the context of a host-specific operation on a distributed
port, the vif_details property describes the binding for the
host for which the port operation is being
performed. Otherwise, it is the same value as
current['binding:vif_details'].
"""
pass
@abc.abstractproperty
def original_vif_details(self):
"""Return the original vif_details of the port.
In the context of a host-specific operation on a distributed
port, the original_vif_details property describes the original
binding for the host for which the port operation is being
performed. Otherwise, it is the same value as
original['binding:vif_details'].
This property is only valid within calls to
update_port_precommit and update_port_postcommit. It returns
None otherwise.
""" """
pass pass

View File

@@ -89,8 +89,12 @@ class PortContext(MechanismDriverContext, api.PortContext):
self._new_bound_segment = None self._new_bound_segment = None
self._next_segments_to_bind = None self._next_segments_to_bind = None
if original_port: if original_port:
self._original_vif_type = binding.vif_type
self._original_vif_details = self._plugin._get_vif_details(binding)
self._original_binding_levels = self._binding_levels self._original_binding_levels = self._binding_levels
else: else:
self._original_vif_type = None
self._original_vif_details = None
self._original_binding_levels = None self._original_binding_levels = None
self._new_port_status = None self._new_port_status = None
@@ -133,7 +137,10 @@ class PortContext(MechanismDriverContext, api.PortContext):
@property @property
def original_status(self): def original_status(self):
return self._original_port['status'] # REVISIT(rkukura): Should return host-specific status for DVR
# ports. Fix as part of resolving bug 1367391.
if self._original_port:
return self._original_port['status']
@property @property
def network(self): def network(self):
@@ -195,7 +202,29 @@ class PortContext(MechanismDriverContext, api.PortContext):
@property @property
def original_host(self): def original_host(self):
return self._original_port.get(portbindings.HOST_ID) # REVISIT(rkukura): Eliminate special DVR case as part of
# resolving bug 1367391?
if self._port['device_owner'] == constants.DEVICE_OWNER_DVR_INTERFACE:
return self._original_port and self._binding.host
else:
return (self._original_port and
self._original_port.get(portbindings.HOST_ID))
@property
def vif_type(self):
return self._binding.vif_type
@property
def original_vif_type(self):
return self._original_vif_type
@property
def vif_details(self):
return self._plugin._get_vif_details(self._binding)
@property
def original_vif_details(self):
return self._original_vif_details
@property @property
def segments_to_bind(self): def segments_to_bind(self):

View File

@@ -244,7 +244,7 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
mech_context._clear_binding_levels() mech_context._clear_binding_levels()
if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE: if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
binding.vif_type = portbindings.VIF_TYPE_DISTRIBUTED binding.vif_type = portbindings.VIF_TYPE_UNBOUND
binding.vif_details = '' binding.vif_details = ''
db.clear_binding_levels(session, port_id, original_host) db.clear_binding_levels(session, port_id, original_host)
mech_context._clear_binding_levels() mech_context._clear_binding_levels()
@@ -434,11 +434,16 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,
return (cur_context, commit) return (cur_context, commit)
def _update_port_dict_binding(self, port, binding): def _update_port_dict_binding(self, port, binding):
port[portbindings.HOST_ID] = binding.host
port[portbindings.VNIC_TYPE] = binding.vnic_type port[portbindings.VNIC_TYPE] = binding.vnic_type
port[portbindings.PROFILE] = self._get_profile(binding) port[portbindings.PROFILE] = self._get_profile(binding)
port[portbindings.VIF_TYPE] = binding.vif_type if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
port[portbindings.VIF_DETAILS] = self._get_vif_details(binding) port[portbindings.HOST_ID] = ''
port[portbindings.VIF_TYPE] = portbindings.VIF_TYPE_DISTRIBUTED
port[portbindings.VIF_DETAILS] = {}
else:
port[portbindings.HOST_ID] = binding.host
port[portbindings.VIF_TYPE] = binding.vif_type
port[portbindings.VIF_DETAILS] = self._get_vif_details(binding)
def _get_vif_details(self, binding): def _get_vif_details(self, binding):
if binding.vif_details: if binding.vif_details:

View File

@@ -93,7 +93,7 @@ class RpcCallbacks(type_tunnel.TunnelRpcCallbackMixin):
{'device': device, {'device': device,
'agent_id': agent_id, 'agent_id': agent_id,
'network_id': port['network_id'], 'network_id': port['network_id'],
'vif_type': port[portbindings.VIF_TYPE]}) 'vif_type': port_context.vif_type})
return {'device': device} return {'device': device}
if (not host or host == port_context.host): if (not host or host == port_context.host):

View File

@@ -112,6 +112,22 @@ class FakePortContext(api.PortContext):
def original_host(self): def original_host(self):
return None return None
@property
def vif_type(self):
return portbindings.UNBOUND
@property
def original_vif_type(self):
return portbindings.UNBOUND
@property
def vif_details(self):
return None
@property
def original_vif_details(self):
return None
@property @property
def segments_to_bind(self): def segments_to_bind(self):
return self._network_context.network_segments return self._network_context.network_segments

View File

@@ -85,6 +85,12 @@ class LoggerMechanismDriver(api.MechanismDriver):
network_context = context.network network_context = context.network
LOG.info(_("%(method)s called with port settings %(current)s " LOG.info(_("%(method)s called with port settings %(current)s "
"(original settings %(original)s) " "(original settings %(original)s) "
"host %(host)s "
"(original host %(original_host)s) "
"vif type %(vif_type)s "
"(original vif type %(original_vif_type)s) "
"vif details %(vif_details)s "
"(original vif details %(original_vif_details)s) "
"binding levels %(levels)s " "binding levels %(levels)s "
"(original binding levels %(original_levels)s) " "(original binding levels %(original_levels)s) "
"on network %(network)s " "on network %(network)s "
@@ -92,6 +98,12 @@ class LoggerMechanismDriver(api.MechanismDriver):
{'method': method_name, {'method': method_name,
'current': context.current, 'current': context.current,
'original': context.original, 'original': context.original,
'host': context.host,
'original_host': context.original_host,
'vif_type': context.vif_type,
'original_vif_type': context.original_vif_type,
'vif_details': context.vif_details,
'original_vif_details': context.original_vif_details,
'levels': context.binding_levels, 'levels': context.binding_levels,
'original_levels': context.original_binding_levels, 'original_levels': context.original_binding_levels,
'network': network_context.current, 'network': network_context.current,

View File

@@ -83,14 +83,12 @@ class TestMechanismDriver(api.MechanismDriver):
def _check_port_context(self, context, original_expected): def _check_port_context(self, context, original_expected):
assert(isinstance(context, api.PortContext)) assert(isinstance(context, api.PortContext))
assert(isinstance(context.current, dict))
assert(context.current['id'] is not None)
vif_type = context.current.get(portbindings.VIF_TYPE) self._check_port_info(context.current, context.host,
assert(vif_type is not None) context.vif_type, context.vif_details)
if vif_type in (portbindings.VIF_TYPE_UNBOUND, if context.vif_type in (portbindings.VIF_TYPE_UNBOUND,
portbindings.VIF_TYPE_BINDING_FAILED): portbindings.VIF_TYPE_BINDING_FAILED):
if (context.segments_to_bind and if (context.segments_to_bind and
context.segments_to_bind[0][api.NETWORK_TYPE] == 'vlan'): context.segments_to_bind[0][api.NETWORK_TYPE] == 'vlan'):
# Partially bound. # Partially bound.
@@ -101,20 +99,24 @@ class TestMechanismDriver(api.MechanismDriver):
self._check_unbound(context.binding_levels, self._check_unbound(context.binding_levels,
context.top_bound_segment, context.top_bound_segment,
context.bottom_bound_segment) context.bottom_bound_segment)
assert(context.current['id'] not in self.bound_ports) assert((context.current['id'], context.host)
not in self.bound_ports)
else: else:
self._check_bound(context.binding_levels, self._check_bound(context.binding_levels,
context.top_bound_segment, context.top_bound_segment,
context.bottom_bound_segment) context.bottom_bound_segment)
assert(context.current['id'] in self.bound_ports) assert((context.current['id'], context.host) in self.bound_ports)
if original_expected: if original_expected:
assert(isinstance(context.original, dict)) self._check_port_info(context.original, context.original_host,
context.original_vif_type,
context.original_vif_details)
assert(context.current['id'] == context.original['id']) assert(context.current['id'] == context.original['id'])
vif_type = context.original.get(portbindings.VIF_TYPE)
assert(vif_type is not None) if (context.original_vif_type in
if vif_type in (portbindings.VIF_TYPE_UNBOUND, (portbindings.VIF_TYPE_UNBOUND,
portbindings.VIF_TYPE_BINDING_FAILED): portbindings.VIF_TYPE_BINDING_FAILED)):
self._check_unbound(context.original_binding_levels, self._check_unbound(context.original_binding_levels,
context.original_top_bound_segment, context.original_top_bound_segment,
context.original_bottom_bound_segment) context.original_bottom_bound_segment)
@@ -124,6 +126,10 @@ class TestMechanismDriver(api.MechanismDriver):
context.original_bottom_bound_segment) context.original_bottom_bound_segment)
else: else:
assert(context.original is None) assert(context.original is None)
assert(context.original_host is None)
assert(context.original_vif_type is None)
assert(context.original_vif_details is None)
assert(context.original_status is None)
self._check_unbound(context.original_binding_levels, self._check_unbound(context.original_binding_levels,
context.original_top_bound_segment, context.original_top_bound_segment,
context.original_bottom_bound_segment) context.original_bottom_bound_segment)
@@ -132,6 +138,27 @@ class TestMechanismDriver(api.MechanismDriver):
assert(isinstance(network_context, api.NetworkContext)) assert(isinstance(network_context, api.NetworkContext))
self._check_network_context(network_context, False) self._check_network_context(network_context, False)
def _check_port_info(self, port, host, vif_type, vif_details):
assert(isinstance(port, dict))
assert(port['id'] is not None)
assert(vif_type in (portbindings.VIF_TYPE_UNBOUND,
portbindings.VIF_TYPE_BINDING_FAILED,
portbindings.VIF_TYPE_DISTRIBUTED,
portbindings.VIF_TYPE_OVS,
portbindings.VIF_TYPE_BRIDGE))
if port['device_owner'] == const.DEVICE_OWNER_DVR_INTERFACE:
assert(port[portbindings.HOST_ID] == '')
assert(port[portbindings.VIF_TYPE] ==
portbindings.VIF_TYPE_DISTRIBUTED)
assert(port[portbindings.VIF_DETAILS] == {})
else:
assert(port[portbindings.HOST_ID] == host)
assert(port[portbindings.VIF_TYPE] !=
portbindings.VIF_TYPE_DISTRIBUTED)
assert(port[portbindings.VIF_TYPE] == vif_type)
assert(isinstance(vif_details, dict))
assert(port[portbindings.VIF_DETAILS] == vif_details)
def _check_unbound(self, levels, top_segment, bottom_segment): def _check_unbound(self, levels, top_segment, bottom_segment):
assert(levels is None) assert(levels is None)
assert(top_segment is None) assert(top_segment is None)
@@ -159,7 +186,8 @@ class TestMechanismDriver(api.MechanismDriver):
def update_port_precommit(self, context): def update_port_precommit(self, context):
if (context.original_top_bound_segment and if (context.original_top_bound_segment and
not context.top_bound_segment): not context.top_bound_segment):
self.bound_ports.remove(context.original['id']) self.bound_ports.remove((context.original['id'],
context.original_host))
self._check_port_context(context, True) self._check_port_context(context, True)
def update_port_postcommit(self, context): def update_port_postcommit(self, context):
@@ -180,16 +208,16 @@ class TestMechanismDriver(api.MechanismDriver):
if host == "host-ovs-no_filter": if host == "host-ovs-no_filter":
context.set_binding(segment_id, portbindings.VIF_TYPE_OVS, context.set_binding(segment_id, portbindings.VIF_TYPE_OVS,
{portbindings.CAP_PORT_FILTER: False}) {portbindings.CAP_PORT_FILTER: False})
self.bound_ports.add(context.current['id']) self.bound_ports.add((context.current['id'], host))
elif host == "host-bridge-filter": elif host == "host-bridge-filter":
context.set_binding(segment_id, portbindings.VIF_TYPE_BRIDGE, context.set_binding(segment_id, portbindings.VIF_TYPE_BRIDGE,
{portbindings.CAP_PORT_FILTER: True}) {portbindings.CAP_PORT_FILTER: True})
self.bound_ports.add(context.current['id']) self.bound_ports.add((context.current['id'], host))
elif host == "host-ovs-filter-active": elif host == "host-ovs-filter-active":
context.set_binding(segment_id, portbindings.VIF_TYPE_OVS, context.set_binding(segment_id, portbindings.VIF_TYPE_OVS,
{portbindings.CAP_PORT_FILTER: True}, {portbindings.CAP_PORT_FILTER: True},
status=const.PORT_STATUS_ACTIVE) status=const.PORT_STATUS_ACTIVE)
self.bound_ports.add(context.current['id']) self.bound_ports.add((context.current['id'], host))
elif host == "host-hierarchical": elif host == "host-hierarchical":
segment_type = segment[api.NETWORK_TYPE] segment_type = segment[api.NETWORK_TYPE]
if segment_type == 'local': if segment_type == 'local':
@@ -202,4 +230,4 @@ class TestMechanismDriver(api.MechanismDriver):
context.set_binding(segment_id, context.set_binding(segment_id,
portbindings.VIF_TYPE_OVS, portbindings.VIF_TYPE_OVS,
{portbindings.CAP_PORT_FILTER: False}) {portbindings.CAP_PORT_FILTER: False})
self.bound_ports.add(context.current['id']) self.bound_ports.add((context.current['id'], host))

View File

@@ -15,6 +15,7 @@
import mock import mock
from neutron.common import constants as const
from neutron import context from neutron import context
from neutron.extensions import portbindings from neutron.extensions import portbindings
from neutron import manager from neutron import manager
@@ -176,3 +177,123 @@ class PortBindingTestCase(test_plugin.NeutronDbPluginV2TestCase):
def test_update_from_host_to_empty_binding_notifies_agent(self): def test_update_from_host_to_empty_binding_notifies_agent(self):
self._test_update_port_binding('host-ovs-no_filter', '') self._test_update_port_binding('host-ovs-no_filter', '')
def test_dvr_binding(self):
ctx = context.get_admin_context()
with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
port_id = port['port']['id']
# Verify port's VIF type and status.
self.assertEqual(portbindings.VIF_TYPE_DISTRIBUTED,
port['port'][portbindings.VIF_TYPE])
self.assertEqual('DOWN', port['port']['status'])
# Update port to bind for a host.
self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
portbindings.HOST_ID: 'host-ovs-no_filter',
'device_id': 'router1'}})
# Get port and verify VIF type and status unchanged.
port = self._show('ports', port_id)
self.assertEqual(portbindings.VIF_TYPE_DISTRIBUTED,
port['port'][portbindings.VIF_TYPE])
self.assertEqual('DOWN', port['port']['status'])
# Get and verify binding details for host
details = self.plugin.endpoints[0].get_device_details(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
self.assertEqual('local', details['network_type'])
# Get port and verify VIF type and changed status.
port = self._show('ports', port_id)
self.assertEqual(portbindings.VIF_TYPE_DISTRIBUTED,
port['port'][portbindings.VIF_TYPE])
self.assertEqual('BUILD', port['port']['status'])
# Mark device up.
self.plugin.endpoints[0].update_device_up(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
# Get port and verify VIF type and changed status.
port = self._show('ports', port_id)
self.assertEqual(portbindings.VIF_TYPE_DISTRIBUTED,
port['port'][portbindings.VIF_TYPE])
self.assertEqual('ACTIVE', port['port']['status'])
# Mark device down.
self.plugin.endpoints[0].update_device_down(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
# Get port and verify VIF type and changed status.
port = self._show('ports', port_id)
self.assertEqual(portbindings.VIF_TYPE_DISTRIBUTED,
port['port'][portbindings.VIF_TYPE])
self.assertEqual('DOWN', port['port']['status'])
def test_dvr_binding_multi_host_status(self):
ctx = context.get_admin_context()
with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
port_id = port['port']['id']
# Update port to bind for 1st host.
self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
portbindings.HOST_ID: 'host-ovs-no_filter',
'device_id': 'router1'}})
# Mark 1st device up.
self.plugin.endpoints[0].update_device_up(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
# Get port and verify status is ACTIVE.
port = self._show('ports', port_id)
self.assertEqual('ACTIVE', port['port']['status'])
# Update port to bind for a 2nd host.
self.plugin.update_dvr_port_binding(ctx, port_id, {'port': {
portbindings.HOST_ID: 'host-bridge-filter',
'device_id': 'router1'}})
# Mark 2nd device up.
self.plugin.endpoints[0].update_device_up(
ctx, agent_id="the2ndAgentId", device=port_id,
host='host-bridge-filter')
# Get port and verify status unchanged.
port = self._show('ports', port_id)
self.assertEqual('ACTIVE', port['port']['status'])
# Mark 1st device down.
self.plugin.endpoints[0].update_device_down(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
# Get port and verify status unchanged.
port = self._show('ports', port_id)
self.assertEqual('ACTIVE', port['port']['status'])
# Mark 2nd device down.
self.plugin.endpoints[0].update_device_down(
ctx, agent_id="the2ndAgentId", device=port_id,
host='host-bridge-filter')
# Get port and verify status is DOWN.
port = self._show('ports', port_id)
self.assertEqual('DOWN', port['port']['status'])
def test_dvr_binding_update_unbound_host(self):
ctx = context.get_admin_context()
with self.port(device_owner=const.DEVICE_OWNER_DVR_INTERFACE) as port:
port_id = port['port']['id']
# Mark device up without first binding on host.
self.plugin.endpoints[0].update_device_up(
ctx, agent_id="theAgentId", device=port_id,
host='host-ovs-no_filter')
# Get port and verify status is still DOWN.
port = self._show('ports', port_id)
self.assertEqual('DOWN', port['port']['status'])