[02/xx] Add SFC drivers to Dragonflow

Added networking_sfc drivers for flow classifiers and SFC functionality.
The drivers can be enabled in devstack by setting ENABLE_DF_SFC=True.
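
For example, in local.conf (a minimal sketch; it assumes the dragonflow and
networking-sfc devstack plugins are also enabled):

    ENABLE_DF_SFC=True

which in turn sets NEUTRON_SFC_DRIVERS and NEUTRON_FLOWCLASSIFIER_DRIVERS
to "dragonflow".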

Partially-implements: blueprint service-function-chaining
Change-Id: I3600caba5618ed16ebad565f73cc4ad1ca128693
Dima Kuznetsov 2016-11-27 15:35:25 +02:00
parent 105928eec4
commit f01f6307c8
13 changed files with 846 additions and 1 deletion

View File

@@ -64,6 +64,12 @@ ENABLE_NEUTRON_NOTIFIER=${ENABLE_NEUTRON_NOTIFIER:-"False"}
# Set value of TUNNEL_ENDPOINT_IP if unset
TUNNEL_ENDPOINT_IP=${TUNNEL_ENDPOINT_IP:-$HOST_IP}
ENABLE_DF_SFC=${ENABLE_DF_SFC:-"False"}
if [[ $ENABLE_DF_SFC == "True" ]]; then
NEUTRON_SFC_DRIVERS=dragonflow
NEUTRON_FLOWCLASSIFIER_DRIVERS=dragonflow
fi
ACTION=$1
STAGE=$2

View File

@@ -265,6 +265,14 @@ class PortRange(object):
if port_min is not None and port_max is not None:
return cls(port_min, port_max)
def __eq__(self, other):
if type(other) != PortRange:
return False
return (self.min, self.max) == (other.min, other.max)
def __ne__(self, other):
return not (self == other)
class PortRangeField(fields.BaseField):
types = (PortRange,)

View File

@@ -9,6 +9,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jsonmodels import errors
from jsonmodels import fields
from neutron_lib import constants
@@ -125,6 +126,16 @@ class FlowClassifier(mf.ModelBase,
return self.dest_port.is_local
return True
def validate(self):
'''Make sure exactly one of {source_port, dest_port} is set'''
super(FlowClassifier, self).validate()
if self.source_port is None and self.dest_port is None:
raise errors.ValidationError(
'One of source_port or dest_port must be set')
elif self.source_port is not None and self.dest_port is not None:
raise errors.ValidationError(
'source_port and dest_port cannot be both set')
@mf.register_model
@mf.construct_nb_db_model(

View File

@@ -0,0 +1,104 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_sfc.services.flowclassifier.common import exceptions as fc_exc
from networking_sfc.services.flowclassifier.drivers import base
from dragonflow._i18n import _
from dragonflow.db import field_types
from dragonflow.db.models import sfc
from dragonflow.neutron.services import mixins
class DfFlowClassifierDriver(base.FlowClassifierDriverBase,
mixins.LazyNbApiMixin):
# The new flow classifier driver API:
def initialize(self):
pass
def create_flow_classifier_precommit(self, context):
flow_classifier = context.current
source_port = flow_classifier.get('logical_source_port')
dest_port = flow_classifier.get('logical_destination_port')
if source_port is None and dest_port is None:
raise fc_exc.FlowClassifierBadRequest(
message=_(
'Either logical_source_port or logical_destination_port '
'has to be specified'
),
)
if source_port is not None and dest_port is not None:
raise fc_exc.FlowClassifierBadRequest(
message=_(
'logical_source_port and logical_destination_port cannot '
'both be specified'
),
)
def create_flow_classifier_postcommit(self, context):
flow_classifier = context.current
self.nb_api.create(
sfc.FlowClassifier(
id=flow_classifier['id'],
topic=flow_classifier['project_id'],
name=flow_classifier.get('name'),
ether_type=flow_classifier.get('ethertype'),
protocol=flow_classifier.get('protocol'),
source_cidr=flow_classifier.get('source_ip_prefix'),
dest_cidr=flow_classifier.get('destination_ip_prefix'),
source_transport_ports=field_types.PortRange.from_min_max(
flow_classifier.get('source_port_range_min'),
flow_classifier.get('source_port_range_max'),
),
dest_transport_ports=field_types.PortRange.from_min_max(
flow_classifier.get('destination_port_range_min'),
flow_classifier.get('destination_port_range_max'),
),
source_port=flow_classifier.get('logical_source_port'),
dest_port=flow_classifier.get('logical_destination_port'),
# FIXME (dimak) add support for l7_parameters
)
)
def update_flow_classifier_postcommit(self, context):
flow_classifier = context.current
# Only name can be updated (and description which we ignore)
self.nb_api.update(
sfc.FlowClassifier(
id=flow_classifier['id'],
topic=flow_classifier['project_id'],
name=flow_classifier.get('name'),
),
)
def delete_flow_classifier_postcommit(self, context):
flow_classifier = context.current
self.nb_api.delete(
sfc.FlowClassifier(
id=flow_classifier['id'],
topic=flow_classifier['project_id'],
),
)
# Legacy FC driver API, has to be stubbed due to ABC
def create_flow_classifier(self, context):
pass
def update_flow_classifier(self, context):
pass
def delete_flow_classifier(self, context):
pass

View File

@@ -0,0 +1,183 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_sfc.services.sfc.drivers import base
from dragonflow.db.models import sfc
from dragonflow.neutron.services import mixins
def _get_optional_params(obj, *params):
'''This function returns a dictionary with all the parameters from `params`
that were present in `obj`, for example:
>>> _get_optional_params({'a': 1, 'b': 2}, 'a', 'c')
{'a': 1}
'''
res = {}
for param in params:
if param in obj:
res[param] = obj.get(param)
return res
class DfSfcDriver(base.SfcDriverBase, mixins.LazyNbApiMixin):
# The new SFC driver API:
def initialize(self):
pass
def create_port_chain_postcommit(self, context):
port_chain = context.current
pc_params = port_chain.get('chain_parameters')
self.nb_api.create(
sfc.PortChain(
id=port_chain['id'],
topic=port_chain['project_id'],
name=port_chain.get('name'),
port_pair_groups=port_chain.get('port_pair_groups', []),
flow_classifiers=port_chain.get('flow_classifiers', []),
protocol=pc_params.get('correlation'),
chain_id=port_chain.get('chain_id'),
),
)
def update_port_chain_postcommit(self, context):
port_chain = context.current
extra_args = _get_optional_params(
port_chain,
'port_pair_groups',
'flow_classifiers',
)
self.nb_api.update(
sfc.PortChain(
id=port_chain['id'],
topic=port_chain['project_id'],
name=port_chain.get('name'),
**extra_args
),
)
def delete_port_chain_postcommit(self, context):
port_chain = context.current
self.nb_api.delete(
sfc.PortChain(
id=port_chain['id'],
topic=port_chain['project_id'],
),
)
def create_port_pair_group_postcommit(self, context):
port_pair_group = context.current
self.nb_api.create(
sfc.PortPairGroup(
id=port_pair_group['id'],
topic=port_pair_group['project_id'],
name=port_pair_group.get('name'),
port_pairs=port_pair_group.get('port_pairs', []),
# FIXME (dimak) add support for lb_fields, service_type
),
)
def update_port_pair_group_postcommit(self, context):
port_pair_group = context.current
extra_args = _get_optional_params(port_pair_group, 'port_pairs')
self.nb_api.update(
sfc.PortPairGroup(
id=port_pair_group['id'],
topic=port_pair_group['project_id'],
name=port_pair_group.get('name'),
**extra_args
),
)
def delete_port_pair_group_postcommit(self, context):
port_pair_group = context.current
self.nb_api.delete(
sfc.PortPairGroup(
id=port_pair_group['id'],
topic=port_pair_group['project_id'],
),
)
def create_port_pair_postcommit(self, context):
port_pair = context.current
sf_params = port_pair.get('service_function_parameters', {})
self.nb_api.create(
sfc.PortPair(
id=port_pair['id'],
topic=port_pair['project_id'],
name=port_pair.get('name'),
ingress_port=port_pair['ingress'],
egress_port=port_pair['egress'],
correlation_mechanism=(
sf_params.get('correlation') or sfc.CORR_NONE
),
weight=sf_params.get('weight')
),
)
def update_port_pair_postcommit(self, context):
port_pair = context.current
self.nb_api.update(
sfc.PortPair(
id=port_pair['id'],
topic=port_pair['project_id'],
name=port_pair.get('name'),
),
)
def delete_port_pair_postcommit(self, context):
port_pair = context.current
self.nb_api.delete(
sfc.PortPair(
id=port_pair['id'],
topic=port_pair['project_id'],
),
)
# Legacy SFC driver API, has to be stubbed due to ABC
def create_port_chain(self, context):
pass
def update_port_chain(self, context):
pass
def delete_port_chain(self, context):
pass
def create_port_pair_group(self, context):
pass
def update_port_pair_group(self, context):
pass
def delete_port_pair_group(self, context):
pass
def create_port_pair(self, context):
pass
def update_port_pair(self, context):
pass
def delete_port_pair(self, context):
pass

View File

@@ -0,0 +1,208 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.api import extensions as api_ext
from neutron.common import config
from neutron_lib import constants
from neutron_lib import context
from oslo_utils import importutils
import testtools
from networking_sfc.db import flowclassifier_db as fdb
from networking_sfc.extensions import flowclassifier
from networking_sfc.services.flowclassifier.common import context as fc_ctx
from networking_sfc.services.flowclassifier.common import exceptions as fc_exc
from networking_sfc.tests import base
from networking_sfc.tests.unit.db import test_flowclassifier_db
from dragonflow.db.models import sfc
from dragonflow.neutron.services.flowclassifier import driver
class TestDfFcDriver(
test_flowclassifier_db.FlowClassifierDbPluginTestCaseBase,
base.NeutronDbPluginV2TestCase
):
resource_prefix_map = dict([
(k, flowclassifier.FLOW_CLASSIFIER_PREFIX)
for k in flowclassifier.RESOURCE_ATTRIBUTE_MAP.keys()
])
def setUp(self):
flowclassifier_plugin = (
test_flowclassifier_db.DB_FLOWCLASSIFIER_PLUGIN_CLASS)
service_plugins = {
flowclassifier.FLOW_CLASSIFIER_EXT: flowclassifier_plugin
}
fdb.FlowClassifierDbPlugin.supported_extension_aliases = [
flowclassifier.FLOW_CLASSIFIER_EXT]
fdb.FlowClassifierDbPlugin.path_prefix = (
flowclassifier.FLOW_CLASSIFIER_PREFIX
)
super(TestDfFcDriver, self).setUp(
ext_mgr=None,
plugin=None,
service_plugins=service_plugins
)
self.flowclassifier_plugin = importutils.import_object(
flowclassifier_plugin)
ext_mgr = api_ext.PluginAwareExtensionManager(
test_flowclassifier_db.extensions_path,
{
flowclassifier.FLOW_CLASSIFIER_EXT: self.flowclassifier_plugin
}
)
app = config.load_paste_app('extensions_test_app')
self.ext_api = api_ext.ExtensionMiddleware(app, ext_mgr=ext_mgr)
self.ctx = context.get_admin_context()
self.driver = driver.DfFlowClassifierDriver()
self.driver.initialize()
self.driver._nb_api = mock.Mock()
def test_create_flow_classifier_precommit_source_port(self):
with self.port(
device_owner='compute',
device_id='test',
) as port:
with self.flow_classifier(flow_classifier={
'name': 'test1',
'logical_source_port': port['port']['id'],
}) as fc:
fc_context = fc_ctx.FlowClassifierContext(
self.flowclassifier_plugin, self.ctx,
fc['flow_classifier']
)
# Make sure validation step doesn't raise an exception
self.driver.create_flow_classifier_precommit(fc_context)
def test_create_flow_classifier_precommit_dest_port(self):
with self.port(
device_owner='compute',
device_id='test',
) as port:
with self.flow_classifier(flow_classifier={
'name': 'test1',
'logical_destination_port': port['port']['id'],
}) as fc:
fc_context = fc_ctx.FlowClassifierContext(
self.flowclassifier_plugin, self.ctx,
fc['flow_classifier']
)
# Make sure validation step doesn't raise an exception
self.driver.create_flow_classifier_precommit(fc_context)
def test_create_flow_classifier_precommit_both_ports(self):
with self.port(
device_owner='compute',
device_id='test',
) as port:
with self.flow_classifier(flow_classifier={
'name': 'test1',
'logical_source_port': port['port']['id'],
'logical_destination_port': port['port']['id'],
}) as fc:
with testtools.ExpectedException(
fc_exc.FlowClassifierBadRequest
):
self.driver.create_flow_classifier_precommit(
fc_ctx.FlowClassifierContext(
self.flowclassifier_plugin, self.ctx,
fc['flow_classifier']
),
)
def test_create_flow_classifier_precommit_no_ports(self):
with self.flow_classifier(flow_classifier={
'name': 'test1',
'logical_source_port': None,
'logical_destination_port': None,
}) as fc:
fc_context = fc_ctx.FlowClassifierContext(
self.flowclassifier_plugin, self.ctx,
fc['flow_classifier']
)
with testtools.ExpectedException(fc_exc.FlowClassifierBadRequest):
self.driver.create_flow_classifier_precommit(fc_context)
def _get_fc_ctx(self, **kwargs):
return fc_ctx.FlowClassifierContext(
self.flowclassifier_plugin,
self.ctx,
kwargs,
)
def test_create_flow_classifier_postcommit(self):
self.driver.create_flow_classifier_postcommit(
self._get_fc_ctx(
id='id1',
project_id='id2',
name='name',
ethertype=constants.IPv4,
source_ip_prefix='1.1.1.0/24',
destination_ip_prefix='2.2.2.0/24',
protocol=constants.PROTO_NAME_TCP,
source_port_range_min=1111,
source_port_range_max=2222,
destination_port_range_min=3333,
destination_port_range_max=4444,
logical_source_port='port1',
logical_destination_port='port2',
),
)
self.driver.nb_api.create.assert_called_once_with(
sfc.FlowClassifier(
id='id1',
topic='id2',
name='name',
ether_type=constants.IPv4,
source_cidr='1.1.1.0/24',
dest_cidr='2.2.2.0/24',
protocol=constants.PROTO_NAME_TCP,
source_transport_ports=[1111, 2222],
dest_transport_ports=[3333, 4444],
source_port='port1',
dest_port='port2',
),
)
def test_update_flow_classifier_postcommit(self):
self.driver.update_flow_classifier_postcommit(
self._get_fc_ctx(
id='id1',
project_id='id2',
name='new-name',
),
)
self.driver.nb_api.update.assert_called_once_with(
sfc.FlowClassifier(
id='id1',
topic='id2',
name='new-name',
),
)
def test_delete_flow_classifier_postcommit(self):
self.driver.delete_flow_classifier_postcommit(
self._get_fc_ctx(
id='id1',
project_id='id2',
),
)
self.driver.nb_api.delete.assert_called_once_with(
sfc.FlowClassifier(
id='id1',
topic='id2',
),
)

View File

@@ -96,7 +96,7 @@ class TestFields(tests_base.BaseTestCase):
self.assertIsInstance(m_struct['ip_list'][0], six.string_types)
self.assertIsInstance(m_struct['ip_list'][1], six.string_types)
- def test_prot_range(self):
+ def test_port_range(self):
m = FieldTestModel(port_range=[100, 200])
self.assertEqual([100, 200], m.to_struct().get('port_range'))
self.assertEqual(100, m.port_range.min)

View File

@@ -0,0 +1,263 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.api import extensions as api_ext
from neutron.common import config
from neutron_lib import context
from oslo_utils import importutils
from networking_sfc.db import flowclassifier_db as fdb
from networking_sfc.db import sfc_db
from networking_sfc.extensions import flowclassifier
from networking_sfc.extensions import sfc as sfc_ext
from networking_sfc.services.sfc.common import context as sfc_ctx
from networking_sfc.tests import base
from networking_sfc.tests.unit.db import test_flowclassifier_db
from networking_sfc.tests.unit.db import test_sfc_db
from dragonflow.db.models import sfc
from dragonflow.neutron.services.sfc import driver
class TestDfSfcDriver(
test_sfc_db.SfcDbPluginTestCaseBase,
test_flowclassifier_db.FlowClassifierDbPluginTestCaseBase,
base.NeutronDbPluginV2TestCase
):
resource_prefix_map = dict([
(k, sfc_ext.SFC_PREFIX)
for k in sfc_ext.RESOURCE_ATTRIBUTE_MAP.keys()
] + [
(k, flowclassifier.FLOW_CLASSIFIER_PREFIX)
for k in flowclassifier.RESOURCE_ATTRIBUTE_MAP.keys()
])
def setUp(self):
sfc_plugin = test_sfc_db.DB_SFC_PLUGIN_CLASS
flowclassifier_plugin = (
test_flowclassifier_db.DB_FLOWCLASSIFIER_PLUGIN_CLASS)
service_plugins = {
sfc_ext.SFC_EXT: sfc_plugin,
flowclassifier.FLOW_CLASSIFIER_EXT: flowclassifier_plugin
}
sfc_db.SfcDbPlugin.supported_extension_aliases = [
sfc_ext.SFC_EXT]
sfc_db.SfcDbPlugin.path_prefix = sfc_ext.SFC_PREFIX
fdb.FlowClassifierDbPlugin.supported_extension_aliases = [
flowclassifier.FLOW_CLASSIFIER_EXT]
fdb.FlowClassifierDbPlugin.path_prefix = (
flowclassifier.FLOW_CLASSIFIER_PREFIX
)
super(TestDfSfcDriver, self).setUp(
ext_mgr=None,
plugin=None,
service_plugins=service_plugins
)
self.sfc_plugin = importutils.import_object(sfc_plugin)
self.flowclassifier_plugin = importutils.import_object(
flowclassifier_plugin)
ext_mgr = api_ext.PluginAwareExtensionManager(
test_sfc_db.extensions_path,
{
sfc_ext.SFC_EXT: self.sfc_plugin,
flowclassifier.FLOW_CLASSIFIER_EXT: self.flowclassifier_plugin
}
)
app = config.load_paste_app('extensions_test_app')
self.ext_api = api_ext.ExtensionMiddleware(app, ext_mgr=ext_mgr)
self.ctx = context.get_admin_context()
self.driver = driver.DfSfcDriver()
self.driver.initialize()
self.driver._nb_api = mock.Mock()
def _get_ctx(self, cls, kwargs):
return cls(
self.sfc_plugin,
self.ctx,
kwargs,
)
def _get_pp_ctx(self, **kwargs):
return self._get_ctx(sfc_ctx.PortPairContext, kwargs)
def _get_ppg_ctx(self, **kwargs):
return self._get_ctx(sfc_ctx.PortPairGroupContext, kwargs)
def _get_pc_ctx(self, **kwargs):
return self._get_ctx(sfc_ctx.PortChainContext, kwargs)
def test_create_port_pair_postcommit(self):
self.driver.create_port_pair_postcommit(
self._get_pp_ctx(
id='id1',
project_id='id2',
name='name',
ingress='ingress-id',
egress='egress-id',
service_function_parameters={
'correlation': 'mpls',
'weight': 2,
},
),
)
self.driver.nb_api.create.assert_called_once_with(
sfc.PortPair(
id='id1',
topic='id2',
name='name',
ingress_port='ingress-id',
egress_port='egress-id',
correlation_mechanism=sfc.CORR_MPLS,
weight=2,
),
)
def test_update_port_pair_postcommit(self):
self.driver.update_port_pair_postcommit(
self._get_pp_ctx(
id='id1',
project_id='id2',
name='new-name',
),
)
self.driver.nb_api.update.assert_called_once_with(
sfc.PortPair(
id='id1',
topic='id2',
name='new-name',
),
)
def test_delete_port_pair_postcommit(self):
self.driver.delete_port_pair_postcommit(
self._get_pp_ctx(
id='id1',
project_id='id2',
),
)
self.driver.nb_api.delete.assert_called_once_with(
sfc.PortPair(
id='id1',
topic='id2',
),
)
def test_create_port_pair_group_postcommit(self):
self.driver.create_port_pair_group_postcommit(
self._get_ppg_ctx(
id='id1',
project_id='id2',
name='name',
port_pairs=['pp1'],
),
)
self.driver.nb_api.create.assert_called_once_with(
sfc.PortPairGroup(
id='id1',
topic='id2',
name='name',
port_pairs=['pp1'],
),
)
def test_update_port_pair_group_postcommit(self):
self.driver.update_port_pair_group_postcommit(
self._get_ppg_ctx(
id='id1',
project_id='id2',
name='new-name',
port_pairs=['pp1', 'pp2'],
),
)
self.driver.nb_api.update.assert_called_once_with(
sfc.PortPairGroup(
id='id1',
topic='id2',
name='new-name',
port_pairs=['pp1', 'pp2'],
),
)
def test_delete_port_pair_group_postcommit(self):
self.driver.delete_port_pair_group_postcommit(
self._get_ppg_ctx(
id='id1',
project_id='id2',
),
)
self.driver.nb_api.delete.assert_called_once_with(
sfc.PortPairGroup(
id='id1',
topic='id2',
),
)
def test_create_port_chain_postcommit(self):
self.driver.create_port_chain_postcommit(
self._get_pc_ctx(
id='id1',
project_id='id2',
name='name',
port_pair_groups=['ppg1', 'ppg2'],
flow_classifiers=['fc1', 'fc2'],
chain_id=7,
chain_parameters={
'correlation': 'mpls',
},
),
)
self.driver.nb_api.create.assert_called_once_with(
sfc.PortChain(
id='id1',
topic='id2',
name='name',
port_pair_groups=['ppg1', 'ppg2'],
flow_classifiers=['fc1', 'fc2'],
chain_id=7,
protocol=sfc.PROTO_MPLS,
),
)
def test_update_port_chain_postcommit(self):
self.driver.update_port_chain_postcommit(
self._get_pc_ctx(
id='id1',
project_id='id2',
name='new-name',
port_pair_groups=['ppg1', 'ppg2'],
flow_classifiers=['fc1', 'fc2'],
),
)
self.driver.nb_api.update.assert_called_once_with(
sfc.PortChain(
id='id1',
topic='id2',
name='new-name',
port_pair_groups=['ppg1', 'ppg2'],
flow_classifiers=['fc1', 'fc2'],
),
)
def test_delete_port_chain_postcommit(self):
self.driver.delete_port_chain_postcommit(
self._get_pc_ctx(
id='id1',
project_id='id2',
),
)
self.driver.nb_api.delete.assert_called_once_with(
sfc.PortChain(
id='id1',
topic='id2',
),
)

View File

@@ -0,0 +1,57 @@
# Copyright (c) 2016 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from jsonmodels import errors
import testtools
from dragonflow.db.models import sfc
from dragonflow.tests import base as tests_base
class TestSfcModels(tests_base.BaseTestCase):
def test_flow_classifier_no_ports(self):
with testtools.ExpectedException(errors.ValidationError):
sfc.FlowClassifier(
id='id1',
topic='topic',
unique_key=1,
).validate()
def test_flow_classifier_both_ports(self):
with testtools.ExpectedException(errors.ValidationError):
sfc.FlowClassifier(
id='id1',
topic='topic',
unique_key=1,
source_port='port1',
dest_port='port2',
).validate()
def test_flow_classifier_source_port(self):
# Check no exception raised
sfc.FlowClassifier(
id='id1',
topic='topic',
unique_key=1,
source_port='port1',
).validate()
def test_flow_classifier_dest_port(self):
# Check no exception raised
sfc.FlowClassifier(
id='id1',
topic='topic',
unique_key=1,
dest_port='port1',
).validate()

View File

@@ -51,6 +51,10 @@ neutron.ml2.mechanism_drivers =
df = dragonflow.neutron.ml2.mech_driver:DFMechDriver
neutron.db.alembic_migrations =
dragonflow = dragonflow.db.neutron.migration:alembic_migrations
networking_sfc.flowclassifier.drivers =
dragonflow = dragonflow.neutron.services.flowclassifier.driver:DfFlowClassifierDriver
networking_sfc.sfc.drivers =
dragonflow = dragonflow.neutron.services.sfc.driver:DfSfcDriver
console_scripts =
df-db = dragonflow.cli.df_db:main
df-local-controller = dragonflow.cmd.eventlet.df_local_controller:main
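
With these entry points registered, the drivers can be selected through
networking-sfc's configuration; for example (a sketch, assuming the
networking-sfc flow classifier and SFC service plugins are loaded in neutron):

    [sfc]
    drivers = dragonflow

    [flowclassifier]
    drivers = dragonflow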

View File

@@ -16,6 +16,7 @@ set -ex
DIR=$(dirname $0)
${DIR}/tox_install_project.sh neutron neutron $*
${DIR}/tox_install_project.sh networking-sfc networking_sfc $*
CONSTRAINTS_FILE=$1
shift