Browse Source

Merge "Add Service Function Chain support(2)"

changes/78/570778/2
Zuul 3 years ago
committed by Gerrit Code Review
parent
commit
2efa99b921
8 changed files with 641 additions and 29 deletions
  1. +51
    -0
      tricircle/network/central_fc_driver.py
  2. +86
    -6
      tricircle/network/central_sfc_driver.py
  3. +26
    -0
      tricircle/network/exceptions.py
  4. +230
    -0
      tricircle/tempestplugin/sfc_test.yaml
  5. +5
    -5
      tricircle/tempestplugin/smoke_test.sh
  6. +186
    -14
      tricircle/tests/unit/network/test_central_sfc_plugin.py
  7. +2
    -0
      tricircle/tests/unit/utils.py
  8. +55
    -4
      tricircle/xjob/xmanager.py

+ 51
- 0
tricircle/network/central_fc_driver.py View File

@ -16,8 +16,10 @@
from oslo_log import helpers as log_helpers
from oslo_log import log
from networking_sfc.db import sfc_db
from networking_sfc.services.flowclassifier.drivers import base as fc_driver
from neutron_lib.plugins import directory
from neutronclient.common import exceptions as client_exceptions
import tricircle.common.client as t_client
@ -25,6 +27,8 @@ import tricircle.common.constants as t_constants
import tricircle.common.context as t_context
from tricircle.common import xrpcapi
import tricircle.db.api as db_api
from tricircle.network import central_plugin
import tricircle.network.exceptions as n_exceptions
LOG = log.getLogger(__name__)
@ -79,6 +83,53 @@ class TricircleFcDriver(fc_driver.FlowClassifierDriverBase):
t_ctx, flowclassifier_id, t_constants.RT_FLOW_CLASSIFIER,
t_ctx.project_id)
def _get_chain_id_by_flowclassifier_id(
        self, context, fc_plugin, flowclassifier_id):
    """Return the id of the port chain using the given flow classifier.

    Looks up the chain<->classifier association table; returns None
    when the classifier is not attached to any port chain.
    """
    query = fc_plugin._model_query(context, sfc_db.ChainClassifierAssoc)
    assoc = query.filter_by(flowclassifier_id=flowclassifier_id).first()
    return assoc['portchain_id'] if assoc else None
def _get_net_id_by_portchain_id(self, context, portchain_id):
    """Return the network id of the chain's first ingress port.

    :param context: neutron plugin context
    :param portchain_id: id of the port chain to resolve
    :raises PortChainNotFound: the chain does not exist
    :raises PortPairsNotFoundForPortPairGroup: the chain's groups
        contain no port pairs
    :raises PortNotFound: the first ingress port does not exist
    """
    sfc_plugin = directory.get_plugin('sfc')
    port_chain = sfc_plugin.get_port_chain(context, portchain_id)
    if not port_chain:
        raise n_exceptions.PortChainNotFound(portchain_id=portchain_id)
    port_pairs = sfc_plugin.get_port_pairs(
        context, {'portpairgroup_id': port_chain['port_pair_groups']})
    if not port_pairs:
        raise n_exceptions.PortPairsNotFoundForPortPairGroup(
            portpairgroup_id=port_chain['port_pair_groups'])
    core_plugin = directory.get_plugin()
    # super() deliberately bypasses TricirclePlugin's own get_port
    # override when fetching the port.
    port = super(central_plugin.TricirclePlugin, core_plugin
                 ).get_port(context, port_pairs[0]['ingress'])
    if not port:
        raise n_exceptions.PortNotFound(port_id=port_pairs[0]['ingress'])
    return port['network_id']
def update_flow_classifier_precommit(self, context):
    """Schedule a bottom-pod sync when a mapped flow classifier changes.

    Classifiers without bottom mappings (never synced down) are ignored.

    :raises PortChainNotFoundForFlowClassifier: no chain uses the
        classifier although a bottom mapping exists
    :raises NetNotFoundForPortChain: the chain's network cannot be
        resolved
    """
    plugin_context = context._plugin_context
    t_ctx = t_context.get_context_from_neutron_context(plugin_context)
    flowclassifier = context.current
    mappings = db_api.get_bottom_mappings_by_top_id(
        t_ctx, flowclassifier['id'], t_constants.RT_FLOW_CLASSIFIER)
    if mappings:
        portchain_id = self._get_chain_id_by_flowclassifier_id(
            plugin_context, context._plugin, flowclassifier['id'])
        if not portchain_id:
            raise n_exceptions.PortChainNotFoundForFlowClassifier(
                flowclassifier_id=flowclassifier['id'])
        net_id = self._get_net_id_by_portchain_id(plugin_context,
                                                  portchain_id)
        if not net_id:
            raise n_exceptions.NetNotFoundForPortChain(
                portchain_id=portchain_id)
        self.xjob_handler.sync_service_function_chain(
            t_ctx, flowclassifier['project_id'], portchain_id,
            net_id, t_constants.POD_NOT_SPECIFIED)
@log_helpers.log_method_call
def create_flow_classifier_precommit(self, context):
    # NOTE(review): intentionally a no-op — creation appears to need no
    # precommit work in this driver; confirm against the postcommit path.
    pass

+ 86
- 6
tricircle/network/central_sfc_driver.py View File

@ -15,6 +15,7 @@
from oslo_log import helpers as log_helpers
from networking_sfc.db import sfc_db
from networking_sfc.services.sfc.drivers import base as sfc_driver
from oslo_log import log
@ -28,6 +29,7 @@ import tricircle.common.context as t_context
from tricircle.common import xrpcapi
import tricircle.db.api as db_api
from tricircle.network import central_plugin
import tricircle.network.exceptions as n_exceptions
LOG = log.getLogger(__name__)
@ -47,6 +49,14 @@ class TricircleSfcDriver(sfc_driver.SfcDriverBase):
self.clients[region_name] = t_client.Client(region_name)
return self.clients[region_name]
def _get_net_id_by_port_id(self, context, port_id):
    """Return the network id of the given central port.

    :raises PortNotFound: no port exists for *port_id*
    """
    core_plugin = directory.get_plugin()
    port = super(central_plugin.TricirclePlugin,
                 core_plugin).get_port(context, port_id)
    if port:
        return port['network_id']
    raise n_exceptions.PortNotFound(port_id=port_id)
def _get_net_id_by_portpairgroups(self, context,
sfc_plugin, port_pair_groups):
if not port_pair_groups:
@ -55,12 +65,7 @@ class TricircleSfcDriver(sfc_driver.SfcDriverBase):
context, {'portpairgroup_id': port_pair_groups})
if not port_pairs:
return None
# currently we only support port pairs in the same network
first_ingress = port_pairs[0]['ingress']
core_plugin = directory.get_plugin()
ingress_port = super(central_plugin.TricirclePlugin, core_plugin
).get_port(context, first_ingress)
return ingress_port['network_id']
return self._get_net_id_by_port_id(context, port_pairs[0]['ingress'])
@log_helpers.log_method_call
def create_port_chain(self, context):
@ -160,6 +165,81 @@ class TricircleSfcDriver(sfc_driver.SfcDriverBase):
t_ctx, portpair_id, t_constants.RT_PORT_PAIR,
t_ctx.project_id)
def update_port_chain_precommit(self, context):
    """Schedule a bottom-pod sync when a mapped port chain is updated.

    Chains without bottom mappings (never synced down) are ignored.
    """
    plugin_context = context._plugin_context
    t_ctx = t_context.get_context_from_neutron_context(plugin_context)
    port_chain = context.current
    mappings = db_api.get_bottom_mappings_by_top_id(
        t_ctx, port_chain['id'], t_constants.RT_PORT_CHAIN)
    if mappings:
        net_id = self._get_net_id_by_portpairgroups(
            plugin_context, context._plugin,
            port_chain['port_pair_groups'])
        if not net_id:
            # No port pairs means no network to locate pods by; skip.
            return
        self.xjob_handler.sync_service_function_chain(
            t_ctx, port_chain['project_id'], port_chain['id'],
            net_id, t_constants.POD_NOT_SPECIFIED)
def _get_chain_id_by_group_id(self, context, sfc_plugin, portpairgroup_id):
    """Return the id of the port chain containing the given group.

    Returns None when the group belongs to no chain.
    """
    query = sfc_plugin._model_query(context, sfc_db.ChainGroupAssoc)
    assoc = query.filter_by(portpairgroup_id=portpairgroup_id).first()
    return assoc['portchain_id'] if assoc else None
def update_port_pair_group_precommit(self, context):
    """Schedule a bottom-pod sync when a mapped port pair group changes."""
    plugin_context = context._plugin_context
    t_ctx = t_context.get_context_from_neutron_context(
        context._plugin_context)
    port_pair_group = context.current
    mappings = db_api.get_bottom_mappings_by_top_id(
        t_ctx, port_pair_group['id'], t_constants.RT_PORT_PAIR_GROUP)
    if mappings:
        portchain_id = self._get_chain_id_by_group_id(
            plugin_context, context._plugin, port_pair_group['id'])
        if port_pair_group['port_pairs']:
            # Locate the network through the group's current pairs.
            net_id = self._get_net_id_by_portpairgroups(
                plugin_context, context._plugin, [port_pair_group['id']])
        elif context.original['port_pairs']:
            # All pairs were removed in this update; fall back to the
            # pre-update pairs to locate the network.
            portpair_id = context.original['port_pairs'][0]
            port_pair = context._plugin._get_port_pair(
                plugin_context, portpair_id)
            net_id = self._get_net_id_by_port_id(
                plugin_context, port_pair['ingress'])
        else:
            # Group had no pairs before or after the update.
            net_id = ''
        # NOTE: 'and', not 'or' — a sync with an empty net_id is still
        # dispatched when a chain id exists.
        if not portchain_id and not net_id:
            return
        self.xjob_handler.sync_service_function_chain(
            t_ctx, port_pair_group['project_id'], portchain_id, net_id,
            t_constants.POD_NOT_SPECIFIED)
def _get_chain_id_by_pair_id(self, context, sfc_plugin, portpair_id):
    """Return the id of the chain that (via its group) contains the pair.

    :raises PortPairNotFound: no port pair exists for *portpair_id*
    """
    port_pair = sfc_plugin._get_port_pair(context, portpair_id)
    if not port_pair:
        raise n_exceptions.PortPairNotFound(portpair_id=portpair_id)
    group_id = port_pair['portpairgroup_id']
    return self._get_chain_id_by_group_id(context, sfc_plugin, group_id)
def update_port_pair_precommit(self, context):
    """Schedule a bottom-pod sync when a mapped port pair is updated."""
    plugin_context = context._plugin_context
    t_ctx = t_context.get_context_from_neutron_context(plugin_context)
    port_pair = context.current
    mappings = db_api.get_bottom_mappings_by_top_id(
        t_ctx, port_pair['id'], t_constants.RT_PORT_PAIR)
    if mappings:
        portchain_id = self._get_chain_id_by_pair_id(
            plugin_context, context._plugin, port_pair['id'])
        net_id = self._get_net_id_by_port_id(
            plugin_context, port_pair['ingress'])
        # Both a chain and a network are required to locate bottom pods.
        if not portchain_id or not net_id:
            return
        self.xjob_handler.sync_service_function_chain(
            t_ctx, port_pair['project_id'], portchain_id,
            net_id, t_constants.POD_NOT_SPECIFIED)
@log_helpers.log_method_call
def update_port_chain(self, context):
    # No-op: update propagation is handled in update_port_chain_precommit.
    pass


+ 26
- 0
tricircle/network/exceptions.py View File

@ -55,3 +55,29 @@ class SecurityGroupRuleNotFound(exceptions.NotFound):
class NetAttachedToNonLocalRouter(exceptions.Conflict):
message = _('Network %(network_id)s has already been attached to non '
'local router %(router_id)s')
class PortNotFound(exceptions.NotFound):
    """Raised when a port cannot be found by id."""
    message = _('Port for id %(port_id)s not found')


class PortPairsNotFoundForPortPairGroup(exceptions.NotFound):
    """Raised when a port pair group contains no port pairs."""
    message = _(
        'Port pairs for port pair group %(portpairgroup_id)s not found')


class PortPairNotFound(exceptions.NotFound):
    """Raised when a port pair cannot be found by id."""
    message = _('Port pair for id %(portpair_id)s not found')


class PortChainNotFound(exceptions.NotFound):
    """Raised when a port chain cannot be found by id."""
    message = _('Port chain for id %(portchain_id)s not found')


class PortChainNotFoundForFlowClassifier(exceptions.NotFound):
    """Raised when a flow classifier is not attached to any port chain."""
    message = _(
        'Port chain for flow classifier %(flowclassifier_id)s not found')


class NetNotFoundForPortChain(exceptions.NotFound):
    """Raised when a port chain's network cannot be resolved."""
    message = _('Net for port chain %(portchain_id)s not found')

+ 230
- 0
tricircle/tempestplugin/sfc_test.yaml View File

@ -229,6 +229,236 @@
retries: 10
condition:
- status: SUCCESS
- task_set_id: update-port-chain
depend: [preparation]
tasks:
- task_id: update-pc
region: central
type: port_chain
action:
target: preparation@pc@id
method: update
params:
name: pc-update
port_pair_groups: [preparation@ppg1@id]
flow_classifiers: []
- task_id: check-job
region: central
type: job
depend: [update-pc]
validate:
predicate: all
retries: 10
condition:
- status: SUCCESS
- task_id: ppg1-1
region: region1
type: port_pair_group
query:
get_one: true
params:
name: ppg1
- task_id: ppg1-2
region: region2
type: port_pair_group
query:
get_one: true
params:
name: ppg1
- task_id: check-update-pc-1
region: region1
type: port_chain
depend:
- check-job
- ppg1-1
validate:
predicate: any
condition:
- name: pc-update
port_pair_groups: [ppg1-1@id]
flow_classifiers: []
- task_id: check-update-pc-2
region: region2
type: port_chain
depend:
- check-job
- ppg1-2
validate:
predicate: any
condition:
- name: pc-update
port_pair_groups: [ppg1-2@id]
flow_classifiers: []
- task_id: update-pc-to-original
region: central
type: port_chain
depend:
- check-update-pc-1
- check-update-pc-2
action:
target: preparation@pc@id
method: update
params:
name: pc
flow_classifiers: [preparation@fc@id]
port_pair_groups:
- preparation@ppg1@id
- preparation@ppg2@id
- task_set_id: update-flow-classifier
depend: [preparation]
tasks:
- task_id: update-fc
region: central
type: flow_classifier
action:
target: preparation@fc@id
method: update
params:
name: fc-update
- task_id: check-job
region: central
type: job
depend: [update-fc]
validate:
predicate: all
retries: 10
condition:
- status: SUCCESS
- task_id: check-update-fc-1
region: region1
type: flow_classifier
depend: [check-job]
query:
get_one: true
params:
name: fc-update
- task_id: check-update-fc-2
region: region2
type: flow_classifier
depend: [check-job]
query:
get_one: true
params:
name: fc-update
- task_id: update-fc-to-original
region: central
type: flow_classifier
depend:
- check-update-fc-1
- check-update-fc-2
action:
target: preparation@fc@id
method: update
params:
name: fc
- task_set_id: update-port-pair-group
depend: [preparation]
tasks:
- task_id: update-ppg1
region: central
type: port_pair_group
action:
target: preparation@ppg1@id
method: update
params:
name: ppg1-update
port_pairs: []
- task_id: check-job
region: central
type: job
depend: [update-ppg1]
validate:
predicate: all
retries: 10
condition:
- status: SUCCESS
- task_id: check-update-ppg1-1
region: region1
type: port_pair_group
depend: [check-job]
validate:
predicate: any
condition:
- name: ppg1-update
port_pairs: []
- task_id: check-update-ppg1-2
region: region2
type: port_pair_group
depend: [check-job]
validate:
predicate: any
condition:
- name: ppg1-update
port_pairs: []
- task_id: update-ppg1-to-original
region: central
type: port_pair_group
depend:
- check-update-ppg1-1
- check-update-ppg1-2
action:
target: preparation@ppg1@id
method: update
params:
name: ppg1
port_pairs: [preparation@pp1@id]
- task_set_id: update-port-pair
depend: [preparation]
tasks:
- task_id: update-pp1
region: central
type: port_pair
action:
target: preparation@pp1@id
method: update
params:
name: pp1-update
- task_id: check-job
region: central
type: job
depend: [update-pp1]
validate:
predicate: all
retries: 10
condition:
- status: SUCCESS
- task_id: check-update-pp1-1
region: region1
type: port_pair
depend: [check-job]
query:
get_one: true
params:
name: pp1-update
- task_id: check-update-pp1-2
region: region2
type: port_pair
depend: [check-job]
query:
get_one: true
params:
name: pp1-update
- task_id: update-pp1-to-original
region: central
type: port_pair
depend:
- check-update-pp1-1
- check-update-pp1-2
action:
target: preparation@pp1@id
method: update
params:
name: pp1
- task_set_id: wait-for-job
tasks:
- task_id: check-job
region: central
type: job
validate:
predicate: all
retries: 10
condition:
- status: SUCCESS
- task_set_id: check
depend: [preparation]
tasks:


+ 5
- 5
tricircle/tempestplugin/smoke_test.sh View File

@ -20,11 +20,11 @@ python run_yaml_test.py trunk_test.yaml "$OS_AUTH_URL" "$OS_TENANT_NAME" "$OS_US
if [ $? != 0 ]; then
die $LINENO "Smoke test fails, error in trunk test"
fi
#echo "Start to run service function chain test"
#python run_yaml_test.py sfc_test.yaml "$OS_AUTH_URL" "$OS_TENANT_NAME" "$OS_USERNAME" "$OS_PASSWORD"
#if [ $? != 0 ]; then
# die $LINENO "Smoke test fails, error in service function chain test"
#fi
echo "Start to run service function chain test"
python run_yaml_test.py sfc_test.yaml "$OS_AUTH_URL" "$OS_TENANT_NAME" "$OS_USERNAME" "$OS_PASSWORD" "$OS_PROJECT_DOMAIN_ID" "$OS_USER_DOMAIN_ID"
if [ $? != 0 ]; then
die $LINENO "Smoke test fails, error in service function chain test"
fi
echo "Start to run qos policy function test"
python run_yaml_test.py qos_policy_rule_test.yaml "$OS_AUTH_URL" "$OS_TENANT_NAME" "$OS_USERNAME" "$OS_PASSWORD" "$OS_PROJECT_DOMAIN_ID" "$OS_USER_DOMAIN_ID"
if [ $? != 0 ]; then


+ 186
- 14
tricircle/tests/unit/network/test_central_sfc_plugin.py View File

@ -14,6 +14,7 @@
# under the License.
import copy
from mock import patch
import six
import unittest
@ -51,6 +52,9 @@ TOP_PORTPAIRS = _resource_store.TOP_SFC_PORT_PAIRS
TOP_PORTPAIRGROUPS = _resource_store.TOP_SFC_PORT_PAIR_GROUPS
TOP_PORTCHAINS = _resource_store.TOP_SFC_PORT_CHAINS
TOP_FLOWCLASSIFIERS = _resource_store.TOP_SFC_FLOW_CLASSIFIERS
TOP_CHAIN_GROUP_ASSOCS = _resource_store.TOP_SFC_CHAIN_GROUP_ASSOCIATIONS
TOP_CHAIN_CLASSIFIER_ASSOCS = (
_resource_store.TOP_SFC_CHAIN_CLASSIFIER_ASSOCIATIONS)
BOTTOM1_PORTS = _resource_store.BOTTOM1_PORTS
BOTTOM2_PORTS = _resource_store.BOTTOM2_PORTS
BOTTOM1_PORTPAIRS = _resource_store.BOTTOM1_SFC_PORT_PAIRS
@ -174,16 +178,21 @@ class FakeClient(test_utils.FakeClient):
return super(FakeClient, self).create_resources(_type, ctx, body)
def get_port_chains(self, ctx, portchain_id):
return self.get_resource('port_chain', ctx, portchain_id)
res = self.get_resource('port_chain', ctx, portchain_id)
return copy.copy(res) if res else res
def get_port_pair_groups(self, ctx, portpairgroup_id):
return self.get_resource('port_pair_group', ctx, portpairgroup_id)
res = self.get_resource('port_pair_group', ctx, portpairgroup_id)
return copy.copy(res) if res else res
def get_flow_classifiers(self, ctx, flowclassifier_id):
return self.get_resource('flow_classifier', ctx, flowclassifier_id)
res = self.get_resource('flow_classifier', ctx, flowclassifier_id)
return copy.copy(res) if res else res
def list_port_pairs(self, ctx, filters=None):
return self.list_resources('port_pair', ctx, filters)
def list_port_pairs(self, ctx, filters=None, _copy=True):
portpairs = self.list_resources('port_pair', ctx, filters)
portpairs_copy = [copy.copy(pp) for pp in portpairs]
return portpairs_copy if _copy else portpairs
def list_flow_classifiers(self, ctx, filters=None):
return self.list_resources('flow_classifier', ctx, filters)
@ -194,15 +203,27 @@ class FakeClient(test_utils.FakeClient):
def list_port_pair_groups(self, ctx, filters=None):
return self.list_resources('port_pair_group', ctx, filters)
def update_port_pair_groups(self, ctx, id, port_pair_group):
def update_port_pairs(self, ctx, portpair_id, body):
    # Apply *body* to the stored port pair with the given id.
    return self.update_resources('port_pair', ctx,
                                 portpair_id, body)
def update_port_pair_groups(self, ctx, portpairgroup_id, body):
filters = [{'key': 'portpairgroup_id',
'comparator': 'eq',
'value': id}]
pps = self.list_port_pairs(ctx, filters)
'value': portpairgroup_id}]
pps = self.list_port_pairs(ctx, filters, False)
for pp in pps:
pp['portpairgroup_id'] = None
return self.update_resources('port_pair_group',
ctx, id, port_pair_group)
return self.update_resources('port_pair_group', ctx,
portpairgroup_id, body)
def update_flow_classifiers(self, ctx, flowclassifier_id, body):
    # Apply *body* to the stored flow classifier with the given id.
    return self.update_resources('flow_classifier', ctx,
                                 flowclassifier_id, body)
def update_port_chains(self, ctx, portchain_id, body):
    # Apply *body* to the stored port chain with the given id.
    return self.update_resources('port_chain', ctx,
                                 portchain_id, body)
def get_ports(self, ctx, port_id):
    # Single-port lookup despite the plural name (mirrors the client API).
    return self.get_resource('port', ctx, port_id)
@ -314,13 +335,15 @@ class FakeSfcPlugin(sfc_plugin.TricircleSfcPlugin):
return FakeClient(region_name)
def get_port_pairs(self, context, filters=None):
ret = []
client = self._get_client('top')
_filter = []
for key, values in six.iteritems(filters):
for v in values:
_filter.append(
{'key': key, 'comparator': 'eq', 'value': v})
return client.list_resources('port_pair', context, _filter)
_filter = [{'key': key, 'comparator': 'eq', 'value': v}]
res = client.list_resources('port_pair', context, _filter)
if res:
ret.extend(res)
return ret
def get_port_chain(self, context, id, fields=None):
client = self._get_client('top')
@ -337,10 +360,25 @@ def fake_get_context_from_neutron_context(q_context):
return ctx
def fake_validate_pps_in_ppg(self, portpairs_list, id=None):
    """Stub replacing SfcDbPlugin._validate_pps_in_ppg; skips validation."""
    pass
def fake_make_port_pair_group_dict(self, port_pair_group, fields=None):
    """Stub replacing SfcDbPlugin._make_port_pair_group_dict.

    Flattens DotDict port-pair records to their ids. Guards against an
    empty port_pairs list: the original `port_pairs[0]` raised IndexError
    for a group with no pairs (the tests create such a group).
    """
    port_pairs = port_pair_group.port_pairs
    if port_pairs and isinstance(port_pairs[0], test_utils.DotDict):
        port_pair_group['port_pairs'] = [pp['id'] for pp in port_pairs]
    return port_pair_group
def fake_make_port_chain_dict(self, port_chain, fields=None):
    """Stub replacing SfcDbPlugin._make_port_chain_dict.

    Flattens the chain's group/classifier association records into plain
    id lists, mirroring the shape of the real dict builder's output.
    """
    group_ids = [assoc['portpairgroup_id']
                 for assoc in port_chain.chain_group_associations]
    classifier_ids = [assoc['flowclassifier_id']
                      for assoc in port_chain.chain_classifier_associations]
    port_chain['port_pair_groups'] = group_ids
    port_chain['flow_classifiers'] = classifier_ids
    return port_chain
def fake_make_port_pair_dict(self, port_pair, fields=None):
    """Identity stub replacing SfcDbPlugin._make_port_pair_dict."""
    return port_pair
@ -586,6 +624,17 @@ class PluginTest(unittest.TestCase):
return t_fc_id, b_fc_id
def _prepare_chain_group_assoc_test(self, chain_id, group_id):
    # Register a top-level chain<->group association in the fake store.
    chain_group_assoc = {'portpairgroup_id': group_id,
                         'portchain_id': chain_id}
    TOP_CHAIN_GROUP_ASSOCS.append(DotDict(chain_group_assoc))
def _prepare_chain_classifier_assoc_test(self,
                                         chain_id, flowclassifier_id):
    # Register a top-level chain<->classifier association in the fake store.
    chain_classifier_assoc = {'flowclassifier_id': flowclassifier_id,
                              'portchain_id': chain_id}
    TOP_CHAIN_CLASSIFIER_ASSOCS.append(DotDict(chain_classifier_assoc))
def _prepare_port_chain_test(self, project_id, t_ctx, pod_name,
index, create_bottom, ids):
t_pc_id = uuidutils.generate_uuid()
@ -879,6 +928,129 @@ class PluginTest(unittest.TestCase):
self.assertEqual(len(TOP_FLOWCLASSIFIERS), 0)
self.assertEqual(len(ppg_mappings), 0)
@patch.object(sfc_db.SfcDbPlugin, '_validate_pps_in_ppg',
              new=fake_validate_pps_in_ppg)
@patch.object(sfc_db.SfcDbPlugin, '_make_port_chain_dict',
              new=fake_make_port_chain_dict)
@patch.object(sfc_db.SfcDbPlugin, '_make_port_pair_group_dict',
              new=fake_make_port_pair_group_dict)
@patch.object(sfc_db.SfcDbPlugin, '_make_port_pair_dict',
              new=fake_make_port_pair_dict)
@patch.object(sfc_db.SfcDbPlugin, 'get_port_chain',
              new=FakeSfcPlugin.get_port_chain)
@patch.object(db_base_plugin_v2.NeutronDbPluginV2, 'get_port',
              new=FakeCorePlugin.get_port)
@patch.object(sfc_db.SfcDbPlugin, 'get_port_pairs',
              new=FakeSfcPlugin.get_port_pairs)
@patch.object(context, 'get_context_from_neutron_context',
              new=fake_get_context_from_neutron_context)
def test_update_service_function_chain(self):
    """Updates to pair/classifier/group/chain propagate top -> bottom."""
    project_id = TEST_TENANT_ID
    q_ctx = FakeNeutronContext()
    t_ctx = context.get_db_context()
    self._basic_pod_setup()
    fake_sfc_plugin = FakeSfcPlugin()
    fake_fc_plugin = FakeFcPlugin()

    # Build a complete chain in pod_1: network, ports, port pair,
    # port pair group, flow classifier, and the chain itself.
    t_net_id = self._prepare_net_test(project_id, t_ctx, 'pod_1')
    src_port_id = self._prepare_port_test(project_id,
                                          t_ctx, 'pod_1', t_net_id)
    ingress1 = self._prepare_port_test(project_id, t_ctx,
                                       'pod_1', t_net_id)
    egress1 = self._prepare_port_test(project_id, t_ctx, 'pod_1', t_net_id)
    t_pp1_id, b_pp1_id = self._prepare_port_pair_test(
        project_id, t_ctx, 'pod_1', 0, ingress1, egress1, True)
    t_ppg1_id, b_ppg1_id = self._prepare_port_pair_group_test(
        project_id, t_ctx, 'pod_1', 0,
        [t_pp1_id], True, [b_pp1_id])
    ppg_mapping = {t_pp1_id: t_ppg1_id}
    self._update_port_pair_test(ppg_mapping, TOP_PORTPAIRS)
    t_fc1_id, b_fc1_id = self._prepare_flow_classifier_test(
        project_id, t_ctx, 'pod_1', 0, src_port_id, True)
    ids = {'t_ppg_id': [t_ppg1_id],
           'b_ppg_id': [b_ppg1_id],
           't_fc_id': [t_fc1_id],
           'b_fc_id': [b_fc1_id]}
    t_pc1_id, b_pc1_id = self._prepare_port_chain_test(
        project_id, t_ctx, 'pod_1', 0, True, ids)
    self._prepare_chain_group_assoc_test(t_pc1_id, t_ppg1_id)
    self._prepare_chain_classifier_assoc_test(t_pc1_id, t_fc1_id)

    # Port pair update is reflected in both top and bottom stores.
    pp_body = {'port_pair': {
        'name': 'new_name',
        'description': 'new_pp_description'}}
    fake_sfc_plugin.update_port_pair(q_ctx, t_pp1_id, pp_body)
    self.assertEqual(TOP_PORTPAIRS[0]['description'], 'new_pp_description')
    self.assertEqual(TOP_PORTPAIRS[0]['name'], 'new_name')
    self.assertEqual(BOTTOM1_PORTPAIRS[0]['description'],
                     'new_pp_description')
    self.assertEqual(BOTTOM1_PORTPAIRS[0]['name'], 'new_name')

    # Flow classifier update is reflected in both stores.
    fc_body = {'flow_classifier': {
        'name': 'new_name',
        'description': 'new_fc_description'}}
    fake_fc_plugin.update_flow_classifier(q_ctx, t_fc1_id, fc_body)
    self.assertEqual(TOP_FLOWCLASSIFIERS[0]['name'], 'new_name')
    self.assertEqual(TOP_FLOWCLASSIFIERS[0]['description'],
                     'new_fc_description')
    self.assertEqual(BOTTOM1_FLOWCLASSIFIERS[0]['name'], 'new_name')
    self.assertEqual(BOTTOM1_FLOWCLASSIFIERS[0]['description'],
                     'new_fc_description')

    # Port pair group update (adding a second pair) is reflected, with
    # bottom ids substituted for top ids in the bottom store.
    ingress2 = self._prepare_port_test(project_id, t_ctx,
                                       'pod_1', t_net_id)
    egress2 = self._prepare_port_test(project_id, t_ctx, 'pod_1', t_net_id)
    t_pp2_id, b_pp2_id = self._prepare_port_pair_test(
        project_id, t_ctx, 'pod_1', 0, ingress2, egress2, True)
    ppg_body = {'port_pair_group': {
        'name': 'new_name',
        'description': 'new_ppg_description',
        'port_pairs': [t_pp1_id, t_pp2_id]}}
    ppg_mapping = {t_pp2_id: t_ppg1_id}
    self._update_port_pair_test(ppg_mapping, TOP_PORTPAIRS)
    fake_sfc_plugin.update_port_pair_group(q_ctx, t_ppg1_id, ppg_body)
    self.assertEqual(TOP_PORTPAIRGROUPS[0]['name'], 'new_name')
    self.assertEqual(TOP_PORTPAIRGROUPS[0]['description'],
                     'new_ppg_description')
    self.assertEqual(TOP_PORTPAIRGROUPS[0]['port_pairs'],
                     [t_pp1_id, t_pp2_id])
    self.assertEqual(BOTTOM1_PORTPAIRGROUPS[0]['name'], 'new_name')
    self.assertEqual(BOTTOM1_PORTPAIRGROUPS[0]['description'],
                     'new_ppg_description')
    self.assertEqual(BOTTOM1_PORTPAIRGROUPS[0]['port_pairs'],
                     [b_pp1_id, b_pp2_id])

    # Port chain update (extra empty group + extra classifier) is
    # reflected, again with bottom ids in the bottom store.
    t_ppg2_id, b_ppg2_id = self._prepare_port_pair_group_test(
        project_id, t_ctx, 'pod_1', 0,
        [], True, [])
    t_fc2_id, b_fc2_id = self._prepare_flow_classifier_test(
        project_id, t_ctx, 'pod_1', 0, src_port_id, True)
    self._prepare_chain_group_assoc_test(t_pc1_id, t_ppg2_id)
    self._prepare_chain_classifier_assoc_test(t_pc1_id, t_fc2_id)
    pc_body = {'port_chain': {
        'name': 'new_name',
        'description': 'new_pc_description',
        'port_pair_groups': [t_ppg1_id, t_ppg2_id],
        'flow_classifiers': [t_fc1_id, t_fc2_id]}}
    fake_sfc_plugin.update_port_chain(q_ctx, t_pc1_id, pc_body)
    self.assertEqual(TOP_PORTCHAINS[0]['name'], 'new_name')
    self.assertEqual(TOP_PORTCHAINS[0]['description'],
                     'new_pc_description')
    self.assertEqual(TOP_PORTCHAINS[0]['port_pair_groups'],
                     [t_ppg1_id, t_ppg2_id])
    self.assertEqual(TOP_PORTCHAINS[0]['flow_classifiers'],
                     [t_fc1_id, t_fc2_id])
    self.assertEqual(BOTTOM1_PORTCHAINS[0]['name'], 'new_name')
    self.assertEqual(BOTTOM1_PORTCHAINS[0]['description'],
                     'new_pc_description')
    self.assertEqual(BOTTOM1_PORTCHAINS[0]['port_pair_groups'],
                     [b_ppg1_id, b_ppg2_id])
    self.assertEqual(BOTTOM1_PORTCHAINS[0]['flow_classifiers'],
                     [b_fc1_id, b_fc2_id])
def tearDown(self):
    # Drop all tables and clear the in-memory fake resource store so
    # each test starts from a clean slate.
    core.ModelBase.metadata.drop_all(core.get_engine())
    test_utils.get_resource_store().clean()


+ 2
- 0
tricircle/tests/unit/utils.py View File

@ -61,6 +61,8 @@ class ResourceStore(object):
('sfc_port_pair_groups', constants.RT_PORT_PAIR_GROUP),
('sfc_port_chains', constants.RT_PORT_CHAIN),
('sfc_flow_classifiers', constants.RT_FLOW_CLASSIFIER),
('sfc_chain_group_associations', None),
('sfc_chain_classifier_associations', None),
('qos_policies', constants.RT_QOS),
('qos_bandwidth_limit_rules',
'qos_bandwidth_limit_rules')]


+ 55
- 4
tricircle/xjob/xmanager.py View File

@ -1466,8 +1466,12 @@ class XManager(PeriodicTasks):
constants.JT_SFC_SYNC].split('#')
if b_pod_id == constants.POD_NOT_SPECIFIED:
mappings = db_api.get_bottom_mappings_by_top_id(
ctx, net_id, constants.RT_NETWORK)
if net_id:
mappings = db_api.get_bottom_mappings_by_top_id(
ctx, net_id, constants.RT_NETWORK)
elif t_port_chain_id:
mappings = db_api.get_bottom_mappings_by_top_id(
ctx, t_port_chain_id, constants.RT_PORT_CHAIN)
b_pods = [mapping[0] for mapping in mappings]
for b_pod in b_pods:
self.xjob_handler.sync_service_function_chain(
@ -1514,6 +1518,16 @@ class XManager(PeriodicTasks):
ctx, pp['project_id'], b_pod, {'id': pp_id},
constants.RT_PORT_PAIR, {'port_pair': pp}, b_client,
ingress=pp['ingress'])
# Push name/description changes of the top port pair down to the
# bottom pod's copy.
pp_body = {'port_pair': {
    'name': pp['name'],
    'description': pp['description']}}
try:
    b_client.update_port_pairs(ctx, b_pp_id, pp_body)
except q_cli_exceptions.NotFound:
    # BUG FIX: oslo_log loggers have no 'Error' attribute; LOG.Error
    # raised AttributeError and masked the NotFound. Also add the
    # missing space between the two message halves.
    LOG.error('port pair: %(pp_id)s not found, '
              'region name: %(name)s',
              {'pp_id': pp_id, 'name': region_name})
    raise
b_pp_ids[key].append(b_pp_id)
b_ppg_ids = []
@ -1525,6 +1539,18 @@ class XManager(PeriodicTasks):
ctx, ppg['project_id'], b_pod, {'id': ppg_id},
constants.RT_PORT_PAIR_GROUP, {'port_pair_group': ppg},
b_client, port_pairs=ppg['port_pairs'])
# Push name/description/membership changes of the top port pair
# group down to the bottom pod's copy.
ppg_body = {'port_pair_group': {
    'name': ppg['name'],
    'description': ppg['description'],
    'port_pairs': ppg['port_pairs']
}}
try:
    b_client.update_port_pair_groups(ctx, b_ppg_id, ppg_body)
except q_cli_exceptions.NotFound:
    # BUG FIX: LOG.Error does not exist (AttributeError); use
    # LOG.error and fix the missing space in the message.
    LOG.error('port pair group: %(t_ppg_id)s not found, '
              'region name: %(name)s',
              {'t_ppg_id': ppg_id, 'name': region_name})
    raise
b_ppg_ids.append(b_ppg_id)
b_fc_ids = []
@ -1536,15 +1562,40 @@ class XManager(PeriodicTasks):
ctx, ppg['project_id'], b_pod, {'id': fc_id},
constants.RT_FLOW_CLASSIFIER, {'flow_classifier': fc},
b_client, logical_source_port=fc['logical_source_port'])
# Push name/description changes of the top flow classifier down to
# the bottom pod's copy.
fc_body = {'flow_classifier': {
    'name': fc['name'],
    'description': fc['description']
}}
try:
    b_client.update_flow_classifiers(ctx, b_fc_id, fc_body)
except q_cli_exceptions.NotFound:
    # BUG FIX: LOG.Error does not exist (AttributeError); use
    # LOG.error and fix the missing space in the message.
    LOG.error('flow classifier: %(fc_id)s not found, '
              'region name: %(name)s',
              {'fc_id': fc_id, 'name': region_name})
    raise
b_fc_ids.append(b_fc_id)
t_pc.pop('id')
t_pc['port_pair_groups'] = b_ppg_ids
t_pc['flow_classifiers'] = b_fc_ids
self._prepare_sfc_bottom_element(
b_pc_id = self._prepare_sfc_bottom_element(
ctx, t_pc['project_id'], b_pod, {'id': t_port_chain_id},
constants.RT_PORT_CHAIN, {'port_chain': t_pc}, b_client,
fc_id=b_fc_ids[0])
fc_id=b_fc_ids[0] if b_fc_ids else None)
# Push the (already id-translated) chain attributes down to the
# bottom pod's port chain.
pc_body = {'port_chain': {
    'name': t_pc['name'],
    'description': t_pc['description'],
    'port_pair_groups': t_pc['port_pair_groups'],
    'flow_classifiers': t_pc['flow_classifiers']
}}
try:
    b_client.update_port_chains(ctx, b_pc_id, pc_body)
except q_cli_exceptions.NotFound:
    # BUG FIX: LOG.Error does not exist on oslo_log loggers; use
    # LOG.error so the original NotFound is logged and re-raised.
    LOG.error('port chain: %(pc_id)s not found, '
              'region name: %(name)s',
              {'pc_id': t_port_chain_id, 'name': region_name})
    raise
self.xjob_handler.recycle_resources(ctx, t_pc['project_id'])


Loading…
Cancel
Save