[Bug-Fix] Add support for multiple classifiers per chain
Closes-Bug: #1719839
Change-Id: I9884dc15584235b687c72a9f2cf9c180e9e1ce89

This commit is contained in:
parent 40ab3be2e9
commit 5a888e1b13
@@ -78,9 +78,13 @@ tosca-vnffgd-sample.yaml>`_.

    type: ACL
    criteria:
      - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480e1
      - destination_port_range: 80-1024
      - ip_proto: 6
      - ip_dst_prefix: 192.168.1.2/24
        destination_port_range: 80-1024
        ip_proto: 6
        ip_dst_prefix: 192.168.1.2/24
      - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480eda
        destination_port_range: 80-1024
        ip_proto: 6
        ip_dst_prefix: 192.168.2.2/24

You can get network_src_port_id and IP destination address through
OpenStack commands like below:
@@ -174,7 +178,7 @@ to the desired VNF instance:

  +--------------------------------------+------+---------------------------+--------+--------------------------------------+--------------------------------------+

  tacker vnffg-create --vnffgd-name myvnffgd --vnf-mapping \
      VNFD1:'91e32c20-6d1f-47a4-9ba7-08f5e5effe07',VNF2:'7168062e-9fa1-4203-8cb7-f5c99ff3ee1b' myvnffg
      VNFD1:'91e32c20-6d1f-47a4-9ba7-08f5e5effe07',VNFD2:'7168062e-9fa1-4203-8cb7-f5c99ff3ee1b' myvnffg

Alternatively, if no vnf-mapping is provided then Tacker VNFFG will attempt
to search for VNF instances derived from the given VNFDs in the VNFFGD. If
@@ -0,0 +1,4 @@
---
features:
  - Add support for multiple classifiers per chain through
    VNFFG templates.
@@ -0,0 +1,43 @@
tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0

description: Sample VNFFG template

topology_template:
  description: Sample VNFFG template

  node_templates:

    Forwarding_path1:
      type: tosca.nodes.nfv.FP.Tacker
      description: creates path (CP12->CP22)
      properties:
        id: 51
        policy:
          type: ACL
          criteria:
            - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480e1
              destination_port_range: 80-1024
              ip_proto: 6
              ip_dst_prefix: 192.168.1.2/24
            - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480eda
              destination_port_range: 80-1024
              ip_proto: 6
              ip_dst_prefix: 192.168.2.2/24
        path:
          - forwarder: VNFD1
            capability: CP12
          - forwarder: VNFD2
            capability: CP22

  groups:
    VNFFG1:
      type: tosca.groups.nfv.VNFFG
      description: HTTP to Corporate Net
      properties:
        vendor: tacker
        version: 1.0
        number_of_endpoints: 2
        dependent_virtual_link: [VL12,VL22]
        connection_point: [CP12,CP22]
        constituent_vnfs: [VNFD1,VNFD2]
      members: [Forwarding_path1]
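
For reference, a minimal sketch of how a criteria list like the one above is
read back as one dict per classifier once the template is loaded; the snippet
and assertions are illustrative only and are not part of the change:

    import yaml

    # A trimmed-down criteria section from the sample above.
    snippet = """
    criteria:
      - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480e1
        ip_proto: 6
        ip_dst_prefix: 192.168.1.2/24
      - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480eda
        ip_proto: 6
        ip_dst_prefix: 192.168.2.2/24
    """
    criteria = yaml.safe_load(snippet)['criteria']
    assert len(criteria) == 2          # two classifiers for the single chain
    assert criteria[0] != criteria[1]  # and they are not duplicates
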
@@ -16,9 +16,9 @@ topology_template:
          type: ACL
          criteria:
            - network_src_port_id: 640dfd77-c92b-45a3-b8fc-22712de480e1
            - destination_port_range: 80-1024
            - ip_proto: 6
            - ip_dst_prefix: 192.168.1.2/24
              destination_port_range: 80-1024
              ip_proto: 6
              ip_dst_prefix: 192.168.1.2/24
        path:
          - forwarder: VNFD1
            capability: CP12
@@ -103,8 +103,9 @@ class VnffgNfp(model_base.BASE, models_v1.HasTenant, models_v1.HasId):
    name = sa.Column(sa.String(255), nullable=False)
    vnffg_id = sa.Column(types.Uuid, sa.ForeignKey('vnffgs.id'),
                         nullable=False)
    classifier = orm.relationship('VnffgClassifier', backref='nfp',
                                  uselist=False)

    # List of associated classifiers
    classifiers = orm.relationship('VnffgClassifier', backref='nfp')
    chain = orm.relationship('VnffgChain', backref='nfp',
                             uselist=False)
@@ -274,6 +275,10 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                                    self._make_classifier_dict,
                                    filters=filters, fields=fields)

    def create_classifiers_map(self, classifier_ids, instance_ids):
        return {classifier_id: instance_ids[i]
                for i, classifier_id in enumerate(classifier_ids)}

    def get_nfp(self, context, nfp_id, fields=None):
        nfp_db = self._get_resource(context, VnffgNfp, nfp_id)
        return self._make_nfp_dict(nfp_db, fields)
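
As a rough illustration of the new helper (the IDs below are placeholders),
the map simply pairs each classifier record, positionally, with the flow
classifier instance that was created for it:

    classifier_ids = ['c1-uuid', 'c2-uuid']    # DB-side classifier records
    instance_ids = ['fc1-uuid', 'fc2-uuid']    # n-sfc flow classifier instances

    classifiers_map = {classifier_id: instance_ids[i]
                       for i, classifier_id in enumerate(classifier_ids)}
    # {'c1-uuid': 'fc1-uuid', 'c2-uuid': 'fc2-uuid'}

The pairing is purely positional, so it relies on the caller building both
lists in the same order.
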
@@ -372,7 +377,13 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):

        nfp_id = uuidutils.generate_uuid()
        sfc_id = uuidutils.generate_uuid()
        classifier_id = uuidutils.generate_uuid()

        matches = self._policy_to_acl_criteria(context, template_db,
                                               nfp_dict['name'],
                                               vnf_mapping)
        LOG.debug('acl_matches %s', matches)

        classifier_ids = [uuidutils.generate_uuid() for i in matches]

        nfp_db = VnffgNfp(id=nfp_id, vnffg_id=vnffg_id,
                          tenant_id=tenant_id,
@@ -395,24 +406,21 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):

            context.session.add(sfc_db)

            sfcc_db = VnffgClassifier(id=classifier_id,
                                      tenant_id=tenant_id,
                                      status=constants.PENDING_CREATE,
                                      nfp_id=nfp_id,
                                      chain_id=sfc_id)
            context.session.add(sfcc_db)
            for i, classifier_id in enumerate(classifier_ids):

            match = self._policy_to_acl_criteria(context, template_db,
                                                 nfp_dict['name'],
                                                 vnf_mapping)
            LOG.debug('acl_match %s', match)
                sfcc_db = VnffgClassifier(id=classifier_id,
                                          tenant_id=tenant_id,
                                          status=constants.PENDING_CREATE,
                                          nfp_id=nfp_id,
                                          chain_id=sfc_id)
                context.session.add(sfcc_db)

            match_db_table = ACLMatchCriteria(
                id=uuidutils.generate_uuid(),
                vnffgc_id=classifier_id,
                **match)
                match_db_table = ACLMatchCriteria(
                    id=uuidutils.generate_uuid(),
                    vnffgc_id=classifier_id,
                    **matches[i])

            context.session.add(match_db_table)
                context.session.add(match_db_table)

        return self._make_vnffg_dict(vnffg_db)
@@ -598,6 +606,25 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
        elif vnf_dict['vim_id'] != vim_id:
            raise nfvo.VnffgVimMappingException(vnf_id=vnf, vim_id=vim_id)

    def _validate_criteria(self, criteria):
        """Validate whether or not the classifiers are unique.

        We define a classifier as unique when at least one
        key-value pair is different from another classifier.
        """
        if not criteria:
            raise nfvo.NfpPolicyCriteriaIndexError()
        elif len(criteria) == 1:
            pass
        else:
            for index, dict_one in enumerate(criteria):
                if index != (len(criteria) - 1):
                    for dict_two in criteria[(index + 1):]:
                        if dict_one == dict_two:
                            raise nfvo. \
                                NfpDuplicatePolicyCriteria(first_dict=dict_one,
                                                           sec_dict=dict_two)

    def _policy_to_acl_criteria(self, context, template_db, nfp_name,
                                vnf_mapping):
        template = template_db.template['vnffgd']['topology_template']
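
A standalone sketch of the uniqueness rule the new _validate_criteria
enforces, with made-up sample dicts: two criteria only count as duplicates
when every key-value pair matches, so changing a single field is enough to
keep a classifier unique.

    criteria = [
        {'ip_proto': 6, 'ip_dst_prefix': '192.168.1.2/24'},
        {'ip_proto': 6, 'ip_dst_prefix': '192.168.2.2/24'},  # one value differs -> unique
        {'ip_proto': 6, 'ip_dst_prefix': '192.168.1.2/24'},  # exact repeat of the first
    ]

    duplicates = [(i, j)
                  for i, one in enumerate(criteria)
                  for j, two in enumerate(criteria[i + 1:], start=i + 1)
                  if one == two]
    # [(0, 2)] -> the plugin raises NfpDuplicatePolicyCriteria for this pair
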
@@ -612,10 +639,14 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
            raise nfvo.NfpPolicyTypeError(type=policy['type'])

        if 'criteria' not in policy:
            raise nfvo.NfpPolicyCriteriaError(error="Missing criteria in "
                                                    "policy")
        match = dict()
            raise nfvo.NfpPolicyCriteriaError(
                error="Missing criteria in policy")

        self._validate_criteria(policy['criteria'])

        matches = []
        for criteria in policy['criteria']:
            match = dict()
            for key, val in criteria.items():
                if key in MATCH_CRITERIA:
                    match.update(self._convert_criteria(context, key, val,
@@ -624,7 +655,9 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                    raise nfvo.NfpPolicyCriteriaError(error="Unsupported "
                                                            "criteria: "
                                                            "{}".format(key))
        return match
            matches.append(match)

        return matches

    def _convert_criteria(self, context, criteria, value, vnf_mapping):
        """Method is used to convert criteria to proper db value from template
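
As a rough sketch of the new return shape only (the exact field names after
_convert_criteria may differ, and the values here are placeholders), the
method now yields one match dict per criteria entry rather than a single
merged dict:

    matches = [
        {'network_src_port_id': 'port-uuid-1', 'ip_proto': 6,
         'ip_dst_prefix': '192.168.1.2/24'},
        {'network_src_port_id': 'port-uuid-2', 'ip_proto': 6,
         'ip_dst_prefix': '192.168.2.2/24'},
    ]
    # Callers iterate this list and create one VnffgClassifier plus one
    # ACLMatchCriteria row per entry.
    assert all(isinstance(match, dict) for match in matches)
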
@@ -676,12 +709,12 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
    # called internally, not by REST API
    # instance_id = None means error on creation
    def _create_vnffg_post(self, context, sfc_instance_id,
                           fc_instance_id, vnffg_dict):
                           classifiers_map, vnffg_dict):
        LOG.debug('SFC created instance is %s', sfc_instance_id)
        LOG.debug('Flow Classifier created instance is %s', fc_instance_id)
        LOG.debug('Flow Classifiers created instances are %s',
                  [classifiers_map[item] for item in classifiers_map])
        nfp_dict = self.get_nfp(context, vnffg_dict['forwarding_paths'])
        sfc_id = nfp_dict['chain_id']
        classifier_id = nfp_dict['classifier_id']
        with context.session.begin(subtransactions=True):
            query = (self._model_query(context, VnffgChain).
                     filter(VnffgChain.id == sfc_id).
@@ -692,32 +725,44 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                query.update({'status': constants.ERROR})
            else:
                query.update({'status': constants.ACTIVE})
            for classifier_id, fc_instance_id in classifiers_map.items():
                query = (self._model_query(context, VnffgClassifier).
                         filter(VnffgClassifier.id == classifier_id).
                         filter(VnffgClassifier.status ==
                                constants.PENDING_CREATE).
                         one())
                query.update({'instance_id': fc_instance_id})

            query = (self._model_query(context, VnffgClassifier).
                     filter(VnffgClassifier.id == classifier_id).
                     filter(VnffgClassifier.status ==
                            constants.PENDING_CREATE).
                     one())
            query.update({'instance_id': fc_instance_id})

            if fc_instance_id is None:
                query.update({'status': constants.ERROR})
            else:
                query.update({'status': constants.ACTIVE})
                if fc_instance_id is None:
                    query.update({'status': constants.ERROR})
                else:
                    query.update({'status': constants.ACTIVE})

    def _create_vnffg_status(self, context, vnffg):
        nfp = self.get_nfp(context, vnffg['forwarding_paths'])
        chain = self.get_sfc(context, nfp['chain_id'])
        classifier = self.get_classifier(context, nfp['classifier_id'])

        if classifier['status'] == constants.ERROR or chain['status'] ==\
                constants.ERROR:
        if chain['status'] == constants.ERROR:
            self._update_all_status(context, vnffg['id'], nfp['id'],
                                    constants.ERROR)
        elif classifier['status'] == constants.ACTIVE and \
                chain['status'] == constants.ACTIVE:
            self._update_all_status(context, vnffg['id'], nfp['id'],
                                    constants.ACTIVE)

        elif chain['status'] == constants.ACTIVE:
            classifiers_active_state = True
            for classifier in [self.get_classifier(context, classifier_id)
                               for classifier_id in nfp['classifier_ids']]:

                if classifier['status'] == constants.ACTIVE:
                    continue
                elif classifier['status'] == constants.ERROR:
                    classifiers_active_state = False
                    break

            if classifiers_active_state:
                self._update_all_status(context, vnffg['id'], nfp['id'],
                                        constants.ACTIVE)
            else:
                self._update_all_status(context, vnffg['id'], nfp['id'],
                                        constants.ERROR)

    def _update_all_status(self, context, vnffg_id, nfp_id, status):
        with context.session.begin(subtransactions=True):
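
The aggregation rule in the new _create_vnffg_status can be read as: a chain
in ERROR always wins, and an ACTIVE chain goes ACTIVE only while no classifier
has failed. A condensed sketch, with status strings hard-coded purely for
illustration:

    def aggregate(chain_status, classifier_statuses):
        # Mirrors _create_vnffg_status: the chain result dominates, and the
        # classifiers only pull the VNFFG to ERROR when one of them failed.
        if chain_status == 'ERROR':
            return 'ERROR'
        if chain_status == 'ACTIVE':
            classifiers_active_state = True
            for status in classifier_statuses:
                if status == 'ERROR':
                    classifiers_active_state = False
                    break
            return 'ACTIVE' if classifiers_active_state else 'ERROR'
        return None  # other chain states are not handled by this branch

    print(aggregate('ACTIVE', ['ACTIVE', 'ACTIVE']))  # ACTIVE
    print(aggregate('ACTIVE', ['ACTIVE', 'ERROR']))   # ERROR
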
@@ -743,7 +788,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
        vnffg = self.get_vnffg(context, vnffg_id)
        nfp = self.get_nfp(context, vnffg['forwarding_paths'])
        sfc = self.get_sfc(context, nfp['chain_id'])
        fc = self.get_classifier(context, nfp['classifier_id'])
        classifiers = [self.get_classifier(context, classifier_id) for
                       classifier_id in nfp['classifier_ids']]
        with context.session.begin(subtransactions=True):
            vnffg_db = self._get_vnffg_db(context, vnffg['id'], _ACTIVE_UPDATE,
                                          constants.PENDING_UPDATE)
@@ -751,8 +797,10 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                              constants.PENDING_UPDATE)
            self._get_sfc_db(context, sfc['id'], _ACTIVE_UPDATE,
                             constants.PENDING_UPDATE)
            self._get_classifier_db(context, fc['id'], _ACTIVE_UPDATE,
                                    constants.PENDING_UPDATE)
            for classifier in classifiers:
                self._get_classifier_db(context, classifier['id'],
                                        _ACTIVE_UPDATE,
                                        constants.PENDING_UPDATE)
        return self._make_vnffg_dict(vnffg_db)

    def _update_vnffg_post(self, context, vnffg_id, new_status,
@@ -760,18 +808,18 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
        vnffg = self.get_vnffg(context, vnffg_id)
        nfp = self.get_nfp(context, vnffg['forwarding_paths'])
        sfc_id = nfp['chain_id']
        classifier_id = nfp['classifier_id']
        classifier_ids = nfp['classifier_ids']
        with context.session.begin(subtransactions=True):
            query = (self._model_query(context, VnffgChain).
                     filter(VnffgChain.id == sfc_id).
                     filter(VnffgChain.status == constants.PENDING_UPDATE))
            query.update({'status': new_status})

            query = (self._model_query(context, VnffgClassifier).
                     filter(VnffgClassifier.id == classifier_id).
                     filter(VnffgClassifier.status ==
                            constants.PENDING_UPDATE))
            query.update({'status': new_status})
            for classifier_id in classifier_ids:
                query = (self._model_query(context, VnffgClassifier).
                         filter(VnffgClassifier.id == classifier_id).
                         filter(VnffgClassifier.status ==
                                constants.PENDING_UPDATE))
                query.update({'status': new_status})

            query = (self._model_query(context, Vnffg).
                     filter(Vnffg.id == vnffg['id']).
@@ -799,14 +847,15 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                for key in _VALID_SFC_UPDATE_ATTRIBUTES:
                    sfc_query.update({key: new_sfc[key]})

    def _update_classifier_post(self, context, sfc_id, new_status,
    def _update_classifier_post(self, context, classifier_ids, new_status,
                                new_fc=None):
        with context.session.begin(subtransactions=True):
            fc_query = (self._model_query(context, VnffgClassifier).
                        filter(VnffgClassifier.id == sfc_id).
                        filter(VnffgClassifier.status ==
                               constants.PENDING_UPDATE))
            fc_query.update({'status': new_status})
            for classifier_id in classifier_ids:
                fc_query = (self._model_query(context, VnffgClassifier).
                            filter(VnffgClassifier.id == classifier_id).
                            filter(VnffgClassifier.status ==
                                   constants.PENDING_UPDATE))
                fc_query.update({'status': new_status})

            if new_fc is not None:
                for key in _VALID_FC_UPDATE_ATTRIBUTES:
@@ -872,7 +921,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
        vnffg = self.get_vnffg(context, vnffg_id)
        nfp = self.get_nfp(context, vnffg['forwarding_paths'])
        chain = self.get_sfc(context, nfp['chain_id'])
        classifier = self.get_classifier(context, nfp['classifier_id'])
        classifiers = [self.get_classifier(context, classifier_id)
                       for classifier_id in nfp['classifier_ids']]
        with context.session.begin(subtransactions=True):
            vnffg_db = self._get_vnffg_db(
                context, vnffg['id'], _ACTIVE_UPDATE_ERROR_DEAD,
@@ -881,9 +931,10 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                              constants.PENDING_DELETE)
            self._get_sfc_db(context, chain['id'], _ACTIVE_UPDATE_ERROR_DEAD,
                             constants.PENDING_DELETE)
            self._get_classifier_db(context, classifier['id'],
                                    _ACTIVE_UPDATE_ERROR_DEAD,
                                    constants.PENDING_DELETE)
            for classifier in classifiers:
                self._get_classifier_db(context, classifier['id'],
                                        _ACTIVE_UPDATE_ERROR_DEAD,
                                        constants.PENDING_DELETE)

        return self._make_vnffg_dict(vnffg_db)
@@ -891,7 +942,10 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
        vnffg = self.get_vnffg(context, vnffg_id)
        nfp = self.get_nfp(context, vnffg['forwarding_paths'])
        chain = self.get_sfc(context, nfp['chain_id'])
        classifier = self.get_classifier(context, nfp['classifier_id'])
        classifiers = [self.get_classifier(context, classifier_id)
                       for classifier_id in nfp['classifier_ids']]
        fc_queries = []
        match_queries = []
        with context.session.begin(subtransactions=True):
            vnffg_query = (
                self._model_query(context, Vnffg).
@@ -905,21 +959,26 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
                self._model_query(context, VnffgChain).
                filter(VnffgChain.id == chain['id']).
                filter(VnffgChain.status == constants.PENDING_DELETE))
            fc_query = (
                self._model_query(context, VnffgClassifier).
                filter(VnffgClassifier.id == classifier['id']).
                filter(VnffgClassifier.status == constants.PENDING_DELETE))
            match_query = (
                self._model_query(context, ACLMatchCriteria).
                filter(ACLMatchCriteria.vnffgc_id == classifier['id']))
            for classifier in classifiers:
                fc_queries.append((
                    self._model_query(context, VnffgClassifier).
                    filter(VnffgClassifier.id == classifier['id']).
                    filter(VnffgClassifier.status ==
                           constants.PENDING_DELETE)))
                match_queries.append((
                    self._model_query(context, ACLMatchCriteria).
                    filter(ACLMatchCriteria.vnffgc_id == classifier['id'])))
            if error:
                vnffg_query.update({'status': constants.ERROR})
                nfp_query.update({'status': constants.ERROR})
                sfc_query.update({'status': constants.ERROR})
                fc_query.update({'status': constants.ERROR})
                for fc_query in fc_queries:
                    fc_query.update({'status': constants.ERROR})
            else:
                match_query.delete()
                fc_query.delete()
                for match_query in match_queries:
                    match_query.delete()
                for fc_query in fc_queries:
                    fc_query.delete()
                sfc_query.delete()
                nfp_query.delete()
                vnffg_query.delete()
@@ -957,7 +1016,8 @@ class VnffgPluginDbMixin(vnffg.VNFFGPluginBase, db_base.CommonDbMixin):
    def _make_nfp_dict(self, nfp_db, fields=None):
        LOG.debug('nfp_db %s', nfp_db)
        res = {'chain_id': nfp_db.chain['id'],
               'classifier_id': nfp_db.classifier['id']}
               'classifier_ids': [classifier['id'] for classifier in
                                  nfp_db.classifiers]}
        key_list = ('name', 'id', 'tenant_id', 'symmetrical', 'status',
                    'path_id', 'vnffg_id')
        res.update((key, nfp_db[key]) for key in key_list)
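
With classifier_ids replacing the old classifier_id key, an NFP dict returned
by _make_nfp_dict now looks roughly like this (the UUIDs and extra fields are
placeholders, not values from the change):

    nfp = {
        'chain_id': 'chain-uuid',
        'classifier_ids': ['classifier-uuid-1', 'classifier-uuid-2'],
        'name': 'Forwarding_path1',
        'status': 'ACTIVE',
        # plus id, tenant_id, symmetrical, path_id and vnffg_id from key_list
    }
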
@@ -195,6 +195,14 @@ class NfpPolicyCriteriaError(exceptions.PolicyCheckError):
    message = _('%(error)s in policy')


class NfpPolicyCriteriaIndexError(exceptions.TackerException):
    message = _('Criteria list can not be empty')


class NfpDuplicatePolicyCriteria(exceptions.TackerException):
    message = _('The %(first_dict)s and %(sec_dict)s are overlapped')


class NfpPolicyNotFoundException(exceptions.NotFound):
    message = _('Policy not found in NFP %(nfp)s')
@@ -381,7 +381,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,

        raise ValueError('empty match field for input flow classifier')

    def create_chain(self, name, fc_id, vnfs, symmetrical=False,
    def create_chain(self, name, fc_ids, vnfs, symmetrical=False,
                     auth_attr=None):
        if not auth_attr:
            LOG.warning("auth information required for n-sfc driver")
@@ -444,7 +444,7 @@ class OpenStack_Driver(abstract_vim_driver.VimAbstractDriver,
        port_chain['name'] = name + '-port-chain'
        port_chain['description'] = 'port-chain for Tacker VNFFG'
        port_chain['port_pair_groups'] = port_pair_group_list
        port_chain['flow_classifiers'] = [fc_id]
        port_chain['flow_classifiers'] = fc_ids
        return neutronclient_.port_chain_create(port_chain)

    def update_chain(self, chain_id, fc_ids, vnfs,
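
For context, a hedged sketch of the port chain body the driver now builds,
with several flow classifier IDs attached to the single chain; the IDs and
the port pair group value are placeholders:

    fc_ids = ['fc-uuid-1', 'fc-uuid-2']      # one flow classifier per criteria entry
    port_pair_group_list = ['ppg-uuid-1']    # placeholder port pair group

    port_chain = {
        'name': 'myvnffg-port-chain',
        'description': 'port-chain for Tacker VNFFG',
        'port_pair_groups': port_pair_group_list,
        'flow_classifiers': fc_ids,          # full list instead of [fc_id]
    }
    # The driver then passes this dict to port_chain_create as shown above.
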
@@ -303,9 +303,12 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
            nfp = super(NfvoPlugin, self).get_nfp(context,
                                                  vnffg_dict['forwarding_paths'])
            sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])
            match = super(NfvoPlugin, self).get_classifier(context,
                                                           nfp['classifier_id'],
                                                           fields='match')['match']
            matches = []
            for classifier_id in nfp['classifier_ids']:
                matches.append(super(NfvoPlugin, self).
                               get_classifier(context,
                                              classifier_id,
                                              fields='match')['match'])
            # grab the first VNF to check it's VIM type
            # we have already checked that all VNFs are in the same VIM
            vim_obj = self._get_vim_from_vnf(context,
@@ -315,22 +318,28 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
            # to the driver. Is it a session, or is full vim obj good enough?
            driver_type = vim_obj['type']
            try:
                fc_id = self._vim_drivers.invoke(driver_type,
                fc_ids = []
                for match in matches:
                    fc_ids.append(self._vim_drivers.invoke(driver_type,
                                                 'create_flow_classifier',
                                                 name=vnffg_dict['name'],
                                                 fc=match,
                                                 auth_attr=vim_obj['auth_cred'],
                                                 symmetrical=sfc['symmetrical'])
                                                 symmetrical=sfc['symmetrical']))
                sfc_id = self._vim_drivers.invoke(driver_type,
                                                  'create_chain',
                                                  name=vnffg_dict['name'],
                                                  vnfs=sfc['chain'], fc_id=fc_id,
                                                  vnfs=sfc['chain'],
                                                  fc_ids=fc_ids,
                                                  symmetrical=sfc['symmetrical'],
                                                  auth_attr=vim_obj['auth_cred'])
            except Exception:
                with excutils.save_and_reraise_exception():
                    self.delete_vnffg(context, vnffg_id=vnffg_dict['id'])
            super(NfvoPlugin, self)._create_vnffg_post(context, sfc_id, fc_id,
            classifiers_map = super(NfvoPlugin, self). \
                create_classifiers_map(nfp['classifier_ids'], fc_ids)
            super(NfvoPlugin, self)._create_vnffg_post(context, sfc_id,
                                                       classifiers_map,
                                                       vnffg_dict)
            super(NfvoPlugin, self)._create_vnffg_status(context, vnffg_dict)
            return vnffg_dict
@@ -345,8 +354,9 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
                                                  vnffg_dict['forwarding_paths'])
            sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])

            fc = super(NfvoPlugin, self).get_classifier(context,
                                                        nfp['classifier_id'])
            classifiers = [super(NfvoPlugin, self).
                           get_classifier(context, classifier_id) for classifier_id
                           in nfp['classifier_ids']]
            template_db = self._get_resource(context, vnffg_db.VnffgTemplate,
                                             vnffg_dict['vnffgd_id'])
            vnf_members = self._get_vnffg_property(template_db.template,
@@ -376,13 +386,21 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
            # we don't support updating the match criteria in first iteration
            # so this is essentially a noop. Good to keep for future use
            # though.
            self._vim_drivers.invoke(driver_type, 'update_flow_classifier',
                                     fc_id=fc['instance_id'], fc=fc['match'],
                                     auth_attr=vim_obj['auth_cred'],
                                     symmetrical=new_vnffg['symmetrical'])
            # In addition to that the code we are adding for the multiple
            # classifier support is also a noop and we are adding it so we
            # do not get compilation errors. It should be changed when the
            # update of the classifier will be supported.
            classifier_instances = []
            for classifier in classifiers:
                self._vim_drivers.invoke(driver_type, 'update_flow_classifier',
                                         fc_id=classifier['instance_id'],
                                         fc=classifier['match'],
                                         auth_attr=vim_obj['auth_cred'],
                                         symmetrical=new_vnffg['symmetrical'])
                classifier_instances.append(classifier['instance_id'])
            self._vim_drivers.invoke(driver_type, 'update_chain',
                                     vnfs=sfc['chain'],
                                     fc_ids=[fc['instance_id']],
                                     fc_ids=classifier_instances,
                                     chain_id=sfc['instance_id'],
                                     auth_attr=vim_obj['auth_cred'],
                                     symmetrical=new_vnffg['symmetrical'])
@@ -398,7 +416,8 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
                                                     constants.ACTIVE, sfc)
        # update classifier - this is just updating status until functional
        # updates are supported to classifier
        super(NfvoPlugin, self)._update_classifier_post(context, fc['id'],
        super(NfvoPlugin, self)._update_classifier_post(context,
                                                        nfp['classifier_ids'],
                                                        constants.ACTIVE)
        return vnffg_dict
@@ -410,8 +429,9 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
                                              vnffg_dict['forwarding_paths'])
        sfc = super(NfvoPlugin, self).get_sfc(context, nfp['chain_id'])

        fc = super(NfvoPlugin, self).get_classifier(context,
                                                    nfp['classifier_id'])
        classifiers = [super(NfvoPlugin, self).
                       get_classifier(context, classifier_id)
                       for classifier_id in nfp['classifier_ids']]
        vim_obj = self._get_vim_from_vnf(context,
                                         list(vnffg_dict[
                                              'vnf_mapping'].values())[0])
@@ -421,11 +441,12 @@ class NfvoPlugin(nfvo_db_plugin.NfvoPluginDb, vnffg_db.VnffgPluginDbMixin,
            self._vim_drivers.invoke(driver_type, 'delete_chain',
                                     chain_id=sfc['instance_id'],
                                     auth_attr=vim_obj['auth_cred'])
            if fc['instance_id'] is not None:
                self._vim_drivers.invoke(driver_type,
                                         'delete_flow_classifier',
                                         fc_id=fc['instance_id'],
                                         auth_attr=vim_obj['auth_cred'])
            for classifier in classifiers:
                if classifier['instance_id'] is not None:
                    self._vim_drivers.invoke(driver_type,
                                             'delete_flow_classifier',
                                             fc_id=classifier['instance_id'],
                                             auth_attr=vim_obj['auth_cred'])
        except Exception:
            with excutils.save_and_reraise_exception():
                vnffg_dict['status'] = constants.ERROR
@@ -45,6 +45,8 @@ vnffgd_tosca_multi_param_template = yaml.safe_load(_get_template(
    'tosca_vnffgd_multi_param_template.yaml'))
vnffgd_invalid_tosca_template = yaml.safe_load(_get_template(
    'tosca_invalid_vnffgd_template.yaml'))
vnffgd_tosca_dupl_criteria_template = yaml.safe_load(_get_template(
    'tosca_vnffgd_dupl_criteria_template.yaml'))
vnfd_scale_tosca_template = _get_template('tosca_scale.yaml')
vnfd_alarm_respawn_tosca_template = _get_template(
    'test_tosca_vnfd_alarm_respawn.yaml')
@@ -254,6 +256,17 @@ def get_dummy_vnffg_obj_vnf_mapping():
                      'symmetrical': False}}


def get_dummy_vnffg_obj_dupl_criteria():
    return {'vnffg': {'description': 'dummy_vnffg_description',
                      'vnffgd_id': u'eb094833-995e-49f0-a047-dfb56aaf7c4e',
                      'tenant_id': u'ad7ebc56538745a08ef7c5e97f8bd437',
                      'name': 'dummy_vnffg',
                      u'attributes': {u'template':
                                      vnffgd_tosca_dupl_criteria_template},
                      'vnf_mapping': {},
                      'symmetrical': False}}


def get_dummy_nsd_obj():
    return {'nsd': {'description': 'dummy_nsd_description',
                    'name': 'dummy_NSD',
@@ -197,7 +197,7 @@ class TestChainSFC(base.TestCase):
        vnfs = [vnf_1, vnf_2, vnf_3]

        result = self.sfc_driver.create_chain(name='fake_ffg',
                                              fc_id=fc_id,
                                              fc_ids=fc_id,
                                              vnfs=vnfs,
                                              auth_attr=auth_attr)
@@ -228,7 +228,7 @@ class TestChainSFC(base.TestCase):
        vnfs = [vnf_1, vnf_2, vnf_3]

        chain_id = self.sfc_driver.create_chain(name='fake_ffg',
                                                fc_id=fc_id,
                                                fc_ids=fc_id,
                                                vnfs=vnfs,
                                                auth_attr=auth_attr)
@@ -420,6 +420,18 @@ class TestNfvoPlugin(db_base.SqlTestCase):
        session.flush()
        return vnffg_template

    def _insert_dummy_vnffg_duplicate_criteria_template(self):
        session = self.context.session
        vnffg_template = vnffg_db.VnffgTemplate(
            id='eb094833-995e-49f0-a047-dfb56aaf7c4e',
            tenant_id='ad7ebc56538745a08ef7c5e97f8bd437',
            name='fake_template',
            description='fake_template_description',
            template={u'vnffgd': utils.vnffgd_tosca_dupl_criteria_template})
        session.add(vnffg_template)
        session.flush()
        return vnffg_template

    def _insert_dummy_vnffg(self):
        session = self.context.session
        vnffg = vnffg_db.Vnffg(
@@ -541,7 +553,7 @@ class TestNfvoPlugin(db_base.SqlTestCase):
            self._driver_manager.invoke.assert_called_with(mock.ANY, mock.ANY,
                                                           name=mock.ANY,
                                                           vnfs=mock.ANY,
                                                           fc_id=mock.ANY,
                                                           fc_ids=mock.ANY,
                                                           auth_attr=mock.ANY,
                                                           symmetrical=mock.ANY
                                                           )
@@ -567,7 +579,7 @@ class TestNfvoPlugin(db_base.SqlTestCase):
            self._driver_manager.invoke.assert_called_with(mock.ANY, mock.ANY,
                                                           name=mock.ANY,
                                                           vnfs=mock.ANY,
                                                           fc_id=mock.ANY,
                                                           fc_ids=mock.ANY,
                                                           auth_attr=mock.ANY,
                                                           symmetrical=mock.ANY
                                                           )
@@ -588,7 +600,7 @@ class TestNfvoPlugin(db_base.SqlTestCase):
            self._driver_manager.invoke.assert_called_with(mock.ANY, mock.ANY,
                                                           name=mock.ANY,
                                                           vnfs=mock.ANY,
                                                           fc_id=mock.ANY,
                                                           fc_ids=mock.ANY,
                                                           auth_attr=mock.ANY,
                                                           symmetrical=mock.ANY
                                                           )
@@ -640,11 +652,23 @@ class TestNfvoPlugin(db_base.SqlTestCase):
            self._driver_manager.invoke.assert_called_with(mock.ANY, mock.ANY,
                                                           name=mock.ANY,
                                                           vnfs=mock.ANY,
                                                           fc_id=mock.ANY,
                                                           fc_ids=mock.ANY,
                                                           auth_attr=mock.ANY,
                                                           symmetrical=mock.ANY
                                                           )

    def test_create_vnffg_duplicate_criteria(self):
        with patch.object(TackerManager, 'get_service_plugins') as \
                mock_plugins:
            mock_plugins.return_value = {'VNFM': FakeVNFMPlugin()}
            mock.patch('tacker.common.driver_manager.DriverManager',
                       side_effect=FakeDriverManager()).start()
            self._insert_dummy_vnffg_duplicate_criteria_template()
            vnffg_obj = utils.get_dummy_vnffg_obj_dupl_criteria()
            self.assertRaises(nfvo.NfpDuplicatePolicyCriteria,
                              self.nfvo_plugin.create_vnffg,
                              self.context, vnffg_obj)

    def test_update_vnffg_nonexistent_vnf(self):
        with patch.object(TackerManager, 'get_service_plugins') as \
                mock_plugins:
@@ -0,0 +1,46 @@
tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0

description: example template

topology_template:
  description: Example VNFFG template

  node_templates:

    Forwarding_path1:
      type: tosca.nodes.nfv.FP.Tacker
      description: creates path (CP11->CP12->CP32)
      properties:
        id: 51
        policy:
          type: ACL
          criteria:
            - network_name: tenant1_net
              destination_port_range: 80-1024
              ip_proto: 6
              ip_dst_prefix: 192.168.1.2/24
            - network_name: tenant1_net
              destination_port_range: 80-1024
              ip_proto: 6
              ip_dst_prefix: 192.168.1.2/24

        path:
          - forwarder: VNF1
            capability: CP11
          - forwarder: VNF1
            capability: CP12
          - forwarder: VNF3
            capability: CP32

  groups:
    VNFFG1:
      type: tosca.groups.nfv.VNFFG
      description: HTTP to Corporate Net
      properties:
        vendor: tacker
        version: 1.0
        number_of_endpoints: 3
        dependent_virtual_link: [VL1,VL2,VL3]
        connection_point: [CP11,CP12,CP32]
        constituent_vnfs: [VNF1,VNF3]
      members: [Forwarding_path1]