Multi-version API support

This patch provides a base for supporting multiple API versions.

The existing code implementing the SOL specifications was hard to
understand and enhance because it is built on the legacy Tacker API
code and the two are intertwined in complicated ways. The SOL code is
therefore re-created independently of the legacy Tacker API so that it
is easier to maintain and enhance.

This patch supports the vnflcm v2 API (api_version 2.0.0) as a
starting point. It supports fewer functions than the existing v1 API
at the moment (Xena), but will catch up by the next release (Y). It
also makes it easy to support additional API versions in the future,
and the v1 API may eventually be moved onto this code base as well.

TODO: enhance UT/FT. Unit and functional test coverage is not
sufficient at the moment; additional UTs and FTs will be provided in
follow-up patches.

Implements: blueprint multi-version-api
Implements: blueprint support-nfv-solv3-start-and-terminate-vnf
Implements: blueprint support-nfv-solv3-query-vnf-instances
Implements: blueprint support-nfv-solv3-query-operation-occurrences
Implements: blueprint support-nfv-solv3-subscriptions

Change-Id: If76f315d8b3856e0eef9b8808b90f0b15d80d488
parent 49ab5f9a15
commit 5f35b695bf
.zuul.yaml
etc/tacker
releasenotes/notes
tacker
api
cmd/eventlet
common
conductor
context.py  db/migration
policies
sol_refactored
api
common
config.py  coordinate.py  exceptions.py  http_client.py  lcm_op_occ_utils.py  subscription_utils.py  vim_utils.py  vnf_instance_utils.py  vnfd_utils.py
conductor
controller
db
infra_drivers/openstack
mgmt_drivers
nfvo
objects
test-tools
tests
functional/sol_v2
__init__.py  base_v2.py  paramgen.py  pkggen.py  test_vnflcm_basic.py  utils.py
samples
sample1
contents
BaseHOT/simple
Definitions
etsi_nfv_sol001_common_types.yaml  etsi_nfv_sol001_vnfd_types.yaml  v2_sample1_df_simple.yaml  v2_sample1_top.vnfd.yaml  v2_sample1_types.yaml
TOSCA-Metadata
sample2
contents
BaseHOT/simple
Definitions
etsi_nfv_sol001_common_types.yaml  etsi_nfv_sol001_vnfd_types.yaml  v2_sample2_df_simple.yaml  v2_sample2_top.vnfd.yaml  v2_sample2_types.yaml
Scripts
TOSCA-Metadata
UserData
unit/sol_refactored
.zuul.yaml (10 lines changed)
@@ -258,6 +258,15 @@
        controller-tacker:
          tox_envlist: dsvm-functional-sol

- job:
    name: tacker-functional-devstack-multinode-sol-v2
    parent: tacker-functional-devstack-multinode-sol
    description: |
      Multinodes job for SOL V2 devstack-based functional tests
    host-vars:
      controller-tacker:
        tox_envlist: dsvm-functional-sol-v2

- job:
    name: tacker-functional-devstack-multinode-sol-separated-nfvo
    parent: tacker-functional-devstack-multinode-sol
@@ -507,3 +516,4 @@
        - tacker-functional-devstack-multinode-sol-separated-nfvo
        - tacker-functional-devstack-multinode-sol-kubernetes
        - tacker-functional-devstack-multinode-libs-master
        - tacker-functional-devstack-multinode-sol-v2
@@ -3,7 +3,9 @@ use = egg:Paste#urlmap
/: tackerversions
/v1.0: tackerapi_v1_0
/vnfpkgm/v1: vnfpkgmapi_v1
/vnflcm: vnflcm_versions
/vnflcm/v1: vnflcm_v1
/vnflcm/v2: vnflcm_v2

[composite:tackerapi_v1_0]
use = call:tacker.auth:pipeline_factory
@@ -20,6 +22,16 @@ use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnflcmaapp_v1
keystone = request_id catch_errors authtoken keystonecontext vnflcmaapp_v1

[composite:vnflcm_v2]
use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnflcmaapp_v2
keystone = request_id catch_errors authtoken keystonecontext vnflcmaapp_v2

[composite:vnflcm_versions]
use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnflcm_api_versions
keystone = request_id catch_errors authtoken keystonecontext vnflcm_api_versions

[filter:request_id]
paste.filter_factory = oslo_middleware:RequestId.factory

@@ -49,3 +61,9 @@ paste.app_factory = tacker.api.vnfpkgm.v1.router:VnfpkgmAPIRouter.factory

[app:vnflcmaapp_v1]
paste.app_factory = tacker.api.vnflcm.v1.router:VnflcmAPIRouter.factory

[app:vnflcmaapp_v2]
paste.app_factory = tacker.sol_refactored.api.router:VnflcmAPIRouterV2.factory

[app:vnflcm_api_versions]
paste.app_factory = tacker.sol_refactored.api.router:VnflcmVersions.factory
@@ -0,0 +1,24 @@
---
features:
  - |
    Support multiple versions of the RESTful API. The client can use
    both VNF LCM API "1.3.0" and "2.0.0" defined by ETSI NFV.

  - |
    Add new RESTful APIs for List VNF LCM API versions and
    Show VNF LCM API versions based on ETSI NFV specifications.
    They enable the client to retrieve the supported versions of the
    VNF LCM API.

  - |
    Add the following new version of RESTful APIs based on ETSI NFV
    specifications. Version "2.0.0" of Create VNF, Delete VNF,
    Instantiate VNF, Terminate VNF, List VNF, Show VNF,
    List VNF LCM operation occurrence, Show VNF LCM operation occurrence,
    Create subscription, List subscription, and Show subscription are added.

  - |
    VNF LCM API "2.0.0" provides a new type of userdata script and
    utility functions to describe it. They enable the user to freely
    operate Heat to meet the unique requirements of a VNF.
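For orientation (not part of the patch), a minimal client-side sketch of the new version handling. The endpoint URL and token are placeholder assumptions; the paths, the default port and the SOL013 "Version" header follow the configuration and routes added in this change.

# Minimal sketch, not part of this patch. Endpoint and token are assumptions.
import requests

TACKER = "http://127.0.0.1:9890"            # [v2_vnfm] endpoint default below
HEADERS = {
    "X-Auth-Token": "<keystone token>",     # assumes the keystone paste pipeline
    "Version": "2.0.0",                     # SOL013 Version header; the v2 WSGI
                                            # layer rejects requests without it
}

# Version-neutral endpoint added by this patch: lists supported LCM API versions.
print(requests.get(f"{TACKER}/vnflcm/api_versions", headers=HEADERS).json())

# A v2 resource; v1 continues to be served under /vnflcm/v1 by the legacy code.
print(requests.get(f"{TACKER}/vnflcm/v2/vnf_instances", headers=HEADERS).json())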
@@ -62,6 +62,18 @@ def validate_mac_address_or_none(instance):
    return True


@jsonschema.FormatChecker.cls_checks('mac_address',
                                     webob.exc.HTTPBadRequest)
def validate_mac_address(instance):
    """Validate instance is a MAC address"""

    if not netaddr.valid_mac(instance):
        msg = _("'%s' is not a valid mac address")
        raise webob.exc.HTTPBadRequest(explanation=msg % instance)

    return True


def _validate_query_parameter_without_value(parameter_name, instance):
    """The query parameter is a flag without a value."""
    if not (isinstance(instance, str) and len(instance)):
@@ -562,6 +562,11 @@ class VnfLcmController(wsgi.Controller):

        return self._view_builder.show(vnf_instance)

    @wsgi.response(http_client.OK)
    def api_versions(self, request):
        return {'uriPrefix': '/vnflcm/v1',
                'apiVersions': [{'version': '1.3.0', 'isDeprecated': False}]}

    @wsgi.response(http_client.OK)
    @wsgi.expected_errors((http_client.FORBIDDEN, http_client.BAD_REQUEST))
    @api_common.validate_supported_params({'filter'})
@@ -60,6 +60,10 @@ class VnflcmAPIRouter(wsgi.Router):

        controller = vnf_lcm_controller.create_resource()

        methods = {"GET": "api_versions"}
        self._setup_route(mapper, "/api_versions",
                          methods, controller, default_resource)

        # Allowed methods on /vnflcm/v1/vnf_instances resource
        methods = {"GET": "index", "POST": "create"}
        self._setup_route(mapper, "/vnf_instances",
@@ -26,6 +26,7 @@ from tacker._i18n import _
from tacker.common import config
from tacker import objects
from tacker import service
from tacker.sol_refactored import objects as sol_objects


oslo_i18n.install("tacker")
@@ -35,6 +36,7 @@ def main():
    # the configuration will be read into the cfg.CONF global data structure
    config.init(sys.argv[1:])
    objects.register_all()
    sol_objects.register_all()
    if not cfg.CONF.config_file:
        sys.exit(_("ERROR: Unable to find configuration file via the default"
                   " search paths (~/.tacker/, ~/, /etc/tacker/, /etc/) and"
@@ -296,11 +296,12 @@ class Connection(object):
        self.servers = []

    def create_consumer(self, topic, endpoints, fanout=False,
-                       exchange='tacker', host=None):
+                       exchange='tacker', host=None, serializer=None):
        target = oslo_messaging.Target(
            topic=topic, server=host or cfg.CONF.host, fanout=fanout,
            exchange=exchange)
-       serializer = objects_base.TackerObjectSerializer()
+       if not serializer:
+           serializer = objects_base.TackerObjectSerializer()
        server = get_server(target, endpoints, serializer)
        self.servers.append(server)
@@ -65,6 +65,8 @@ from tacker.objects import vnfd as vnfd_db
from tacker.objects import vnfd_attribute as vnfd_attribute_db
from tacker.plugins.common import constants
from tacker import service as tacker_service
from tacker.sol_refactored.conductor import v2_hook
from tacker.sol_refactored import objects as sol_objects
from tacker import version
from tacker.vnflcm import utils as vnflcm_utils
from tacker.vnflcm import vnflcm_driver
@@ -296,7 +298,7 @@ def grant_error_common(function):
    return decorated_function


-class Conductor(manager.Manager):
+class Conductor(manager.Manager, v2_hook.ConductorV2Hook):
    def __init__(self, host, conf=None):
        if conf:
            self.conf = conf
@@ -2334,6 +2336,7 @@ def init(args, **kwargs):
def main(manager='tacker.conductor.conductor_server.Conductor'):
    init(sys.argv[1:])
    objects.register_all()
    sol_objects.register_all()
    logging.setup(CONF, "tacker")
    oslo_messaging.set_transport_defaults(control_exchange='tacker')
    logging.setup(CONF, "tacker")
@@ -169,6 +169,7 @@ class Context(ContextBaseWithSession):
    def __init__(self, *args, **kwargs):
        super(Context, self).__init__(*args, **kwargs)
        self._session = None
        self._api_version = None

    @property
    def session(self):
@@ -180,6 +181,14 @@
            self._session = db_api.get_session()
        return self._session

    @property
    def api_version(self):
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        self._api_version = api_version


def get_admin_context():
    return Context(user_id=None,
@@ -1 +1 @@
-6dc60a5760e5
+a23ebee909a8
@ -0,0 +1,97 @@
|
||||
# Copyright 2021 OpenStack Foundation
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
|
||||
"""introduce_sol_refactored_models
|
||||
|
||||
Revision ID: a23ebee909a8
|
||||
Revises: 6dc60a5760e5
|
||||
Create Date: 2021-04-20 15:33:42.686284
|
||||
|
||||
"""
|
||||
|
||||
# flake8: noqa: E402
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'a23ebee909a8'
|
||||
down_revision = '6dc60a5760e5'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(active_plugins=None, options=None):
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('LccnSubscriptionV2',
|
||||
sa.Column('id', sa.String(length=255), nullable=False),
|
||||
sa.Column('filter', sa.JSON(), nullable=True),
|
||||
sa.Column('callbackUri', sa.String(length=255), nullable=False),
|
||||
sa.Column('authentication', sa.JSON(), nullable=True),
|
||||
sa.Column('verbosity', sa.Enum('FULL', 'SHORT'), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
|
||||
op.create_table('VnfInstanceV2',
|
||||
sa.Column('id', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfInstanceName', sa.String(length=255), nullable=True),
|
||||
sa.Column('vnfInstanceDescription', sa.Text(), nullable=True),
|
||||
sa.Column('vnfdId', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfProvider', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfProductName', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfSoftwareVersion', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfdVersion', sa.String(length=255), nullable=False),
|
||||
sa.Column('vnfConfigurableProperties', sa.JSON(), nullable=True),
|
||||
sa.Column('vimConnectionInfo', sa.JSON(), nullable=True),
|
||||
sa.Column('instantiationState',
|
||||
sa.Enum('NOT_INSTANTIATED', 'INSTANTIATED'),
|
||||
nullable=False),
|
||||
sa.Column('instantiatedVnfInfo', sa.JSON(), nullable=True),
|
||||
sa.Column('metadata', sa.JSON(), nullable=True),
|
||||
sa.Column('extensions', sa.JSON(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
|
||||
op.create_table('VnfLcmOpOccV2',
|
||||
sa.Column('id', sa.String(length=255), nullable=False),
|
||||
sa.Column('operationState',
|
||||
sa.Enum('STARTING', 'PROCESSING', 'COMPLETED', 'FAILED_TEMP',
|
||||
'FAILED', 'ROLLING_BACK', 'ROLLED_BACK'),
|
||||
nullable=False),
|
||||
sa.Column('stateEnteredTime', sa.DateTime(), nullable=False),
|
||||
sa.Column('startTime', sa.DateTime(), nullable=False),
|
||||
sa.Column('vnfInstanceId', sa.String(length=255), nullable=False),
|
||||
sa.Column('grantId', sa.String(length=255), nullable=True),
|
||||
sa.Column('operation',
|
||||
sa.Enum('INSTANTIATE', 'SCALE', 'SCALE_TO_LEVEL',
|
||||
'CHANGE_FLAVOUR', 'TERMINATE', 'HEAL', 'OPERATE',
|
||||
'CHANGE_EXT_CONN', 'MODIFY_INFO', 'CREATE_SNAPSHOT',
|
||||
'REVERT_TO_SNAPSHOT', 'CHANGE_VNFPKG'),
|
||||
nullable=False),
|
||||
sa.Column('isAutomaticInvocation', sa.Boolean(), nullable=False),
|
||||
sa.Column('operationParams', sa.JSON(), nullable=True),
|
||||
sa.Column('isCancelPending', sa.Boolean(), nullable=False),
|
||||
sa.Column('cancelMode', sa.Enum('GRACEFUL', 'FORCEFUL'), nullable=True),
|
||||
sa.Column('error', sa.JSON(), nullable=True),
|
||||
sa.Column('resourceChanges', sa.JSON(), nullable=True),
|
||||
sa.Column('changedInfo', sa.JSON(), nullable=True),
|
||||
sa.Column('changedExtConnectivity', sa.JSON(), nullable=True),
|
||||
sa.Column('modificationsTriggeredByVnfPkgChange', sa.JSON(),
|
||||
nullable=True),
|
||||
sa.Column('vnfSnapshotInfoId', sa.String(length=255), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id'),
|
||||
mysql_engine='InnoDB'
|
||||
)
|
||||
# ### end Alembic commands ###
|
@@ -21,12 +21,9 @@ Based on this comparison database can be healed with healing migration.

"""

-from tacker.db import model_base
+from tacker.db import model_base  # noqa
from tacker.db.nfvo import nfvo_db  # noqa
from tacker.db.nfvo import ns_db  # noqa
from tacker.db.nfvo import vnffg_db  # noqa
from tacker.db.vnfm import vnfm_db  # noqa


def get_metadata():
    return model_base.BASE.metadata
from tacker.sol_refactored.db.sqlalchemy import models  # noqa
@@ -19,6 +19,7 @@ import itertools
from tacker.policies import base
from tacker.policies import vnf_lcm
from tacker.policies import vnf_package
from tacker.sol_refactored.api.policies import vnflcm_v2


def list_rules():
@@ -26,4 +27,5 @@ def list_rules():
        base.list_rules(),
        vnf_package.list_rules(),
        vnf_lcm.list_rules(),
        vnflcm_v2.list_rules(),
    )
@@ -22,6 +22,17 @@ from tacker.policies import base
VNFLCM = 'os_nfv_orchestration_api:vnf_instances:%s'

rules = [
    policy.DocumentedRuleDefault(
        name=VNFLCM % 'api_versions',
        check_str=base.RULE_ANY,
        description="Get API Versions.",
        operations=[
            {
                'method': 'GET',
                'path': '/vnflcm/v1/api_versions'
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=VNFLCM % 'create',
        check_str=base.RULE_ADMIN_OR_OWNER,
tacker/sol_refactored/api/api_version.py (new file, 115 lines)
@ -0,0 +1,115 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import re
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
|
||||
|
||||
supported_versions_v1 = {
|
||||
'uriPrefix': '/vnflcm/v1',
|
||||
'apiVersions': [
|
||||
{'version': '1.3.0', 'isDeprecated': False}
|
||||
]
|
||||
}
|
||||
|
||||
supported_versions_v2 = {
|
||||
'uriPrefix': '/vnflcm/v2',
|
||||
'apiVersions': [
|
||||
{'version': '2.0.0', 'isDeprecated': False}
|
||||
]
|
||||
}
|
||||
|
||||
CURRENT_VERSION = '2.0.0'
|
||||
|
||||
supported_versions = [
|
||||
item['version'] for item in supported_versions_v2['apiVersions']
|
||||
]
|
||||
|
||||
|
||||
class APIVersion(object):
|
||||
|
||||
def __init__(self, version_string=None):
|
||||
self.ver_major = 0
|
||||
self.ver_minor = 0
|
||||
self.ver_patch = 0
|
||||
|
||||
if version_string is None:
|
||||
return
|
||||
|
||||
version_string = self._get_version_id(version_string)
|
||||
match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)\.([1-9]\d*|0)$",
|
||||
version_string)
|
||||
if match:
|
||||
self.ver_major = int(match.group(1))
|
||||
self.ver_minor = int(match.group(2))
|
||||
self.ver_patch = int(match.group(3))
|
||||
else:
|
||||
raise sol_ex.InvalidAPIVersionString(version=version_string)
|
||||
|
||||
if version_string not in supported_versions:
|
||||
raise sol_ex.APIVersionNotSupported(version=version_string)
|
||||
|
||||
def _get_version_id(self, version_string):
|
||||
# version example (see. SOL013 Table 4.2.2-1)
|
||||
# `1.2.0` or `1.2.0-impl:example.com:myProduct:4`
|
||||
# This method checks the later case and return the part of
|
||||
# version identifier. check is loose.
|
||||
if '-' not in version_string:
|
||||
return version_string
|
||||
items = version_string.split('-')
|
||||
if len(items) == 2 and items[1].startswith("impl:"):
|
||||
return items[0]
|
||||
raise sol_ex.InvalidAPIVersionString(version=version_string)
|
||||
|
||||
def is_null(self):
|
||||
return (self.ver_major, self.ver_minor, self.ver_patch) == (0, 0, 0)
|
||||
|
||||
def __str__(self):
|
||||
return "%d.%d.%d" % (self.ver_major, self.ver_minor, self.ver_patch)
|
||||
|
||||
def __lt__(self, other):
|
||||
return ((self.ver_major, self.ver_minor, self.ver_patch) <
|
||||
(other.ver_major, other.ver_minor, other.ver_patch))
|
||||
|
||||
def __eq__(self, other):
|
||||
return ((self.ver_major, self.ver_minor, self.ver_patch) ==
|
||||
(other.ver_major, other.ver_minor, other.ver_patch))
|
||||
|
||||
def __gt__(self, other):
|
||||
return ((self.ver_major, self.ver_minor, self.ver_patch) >
|
||||
(other.ver_major, other.ver_minor, other.ver_patch))
|
||||
|
||||
def __le__(self, other):
|
||||
return self < other or self == other
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __ge__(self, other):
|
||||
return self > other or self == other
|
||||
|
||||
def matches(self, min_version, max_version):
|
||||
if self.is_null():
|
||||
return False
|
||||
if max_version.is_null() and min_version.is_null():
|
||||
return True
|
||||
elif max_version.is_null():
|
||||
return min_version <= self
|
||||
elif min_version.is_null():
|
||||
return self <= max_version
|
||||
else:
|
||||
return min_version <= self <= max_version
|
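To make the version handling above concrete, a small illustrative snippet (not part of the patch) that exercises the APIVersion class as defined in this file:

# Illustrative only; behaviour follows the APIVersion class above.
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex

ver = api_version.APIVersion('2.0.0')
# SOL013 allows a vendor-specific suffix; _get_version_id() strips it.
assert ver == api_version.APIVersion('2.0.0-impl:example.com:myProduct:4')

# Versions outside supported_versions are rejected ('1.3.0' is served by
# the legacy v1 code, not by this module).
try:
    api_version.APIVersion('1.3.0')
except sol_ex.APIVersionNotSupported:
    pass

# matches() implements the [min, max] range check used by the request
# body validator; a null APIVersion() means "no bound".
assert ver.matches(api_version.APIVersion('2.0.0'), api_version.APIVersion())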
tacker/sol_refactored/api/policies/vnflcm_v2.py (new file, 170 lines)
@ -0,0 +1,170 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_policy import policy
|
||||
|
||||
|
||||
POLICY_NAME = 'os_nfv_orchestration_api_v2:vnf_instances:{}'
|
||||
RULE_ANY = '@'
|
||||
|
||||
V2_PATH = '/vnflcm/v2'
|
||||
API_VERSIONS_PATH = V2_PATH + '/api_versions'
|
||||
VNF_INSTANCES_PATH = V2_PATH + '/vnf_instances'
|
||||
VNF_INSTANCES_ID_PATH = VNF_INSTANCES_PATH + '/{vnfInstanceId}'
|
||||
SUBSCRIPTIONS_PATH = V2_PATH + '/subscriptions'
|
||||
SUBSCRIPTIONS_ID_PATH = VNF_INSTANCES_PATH + '/{subscriptionId}'
|
||||
VNF_LCM_OP_OCCS_PATH = V2_PATH + '/vnf_lcm_op_occs'
|
||||
VNF_LCM_OP_OCCS_ID_PATH = VNF_LCM_OP_OCCS_PATH + '/{vnfLcmOpOccId}'
|
||||
|
||||
rules = [
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('api_versions'),
|
||||
check_str=RULE_ANY,
|
||||
description="Get API Versions.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': API_VERSIONS_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('create'),
|
||||
check_str=RULE_ANY,
|
||||
description="Creates vnf instance.",
|
||||
operations=[
|
||||
{'method': 'POST',
|
||||
'path': VNF_INSTANCES_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('index'),
|
||||
check_str=RULE_ANY,
|
||||
description="Query VNF instances.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': VNF_INSTANCES_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('show'),
|
||||
check_str=RULE_ANY,
|
||||
description="Query an Individual VNF instance.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': VNF_INSTANCES_ID_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('delete'),
|
||||
check_str=RULE_ANY,
|
||||
description="Delete an Individual VNF instance.",
|
||||
operations=[
|
||||
{'method': 'DELETE',
|
||||
'path': VNF_INSTANCES_ID_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('instantiate'),
|
||||
check_str=RULE_ANY,
|
||||
description="Instantiate vnf instance.",
|
||||
operations=[
|
||||
{'method': 'POST',
|
||||
'path': VNF_INSTANCES_ID_PATH + '/instantiate'}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('terminate'),
|
||||
check_str=RULE_ANY,
|
||||
description="Terminate vnf instance.",
|
||||
operations=[
|
||||
{'method': 'POST',
|
||||
'path': VNF_INSTANCES_ID_PATH + '/terminate'}
|
||||
]
|
||||
),
|
||||
|
||||
# TODO(oda-g): add more lcm operations etc when implemented.
|
||||
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('subscription_create'),
|
||||
check_str=RULE_ANY,
|
||||
description="Create subscription.",
|
||||
operations=[
|
||||
{'method': 'POST',
|
||||
'path': SUBSCRIPTIONS_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('subscription_list'),
|
||||
check_str=RULE_ANY,
|
||||
description="List subscription.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': SUBSCRIPTIONS_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('subscription_show'),
|
||||
check_str=RULE_ANY,
|
||||
description="Show subscription.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': SUBSCRIPTIONS_ID_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('subscription_delete'),
|
||||
check_str=RULE_ANY,
|
||||
description="Delete subscription.",
|
||||
operations=[
|
||||
{'method': 'DELETE',
|
||||
'path': SUBSCRIPTIONS_ID_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('lcm_op_occ_list'),
|
||||
check_str=RULE_ANY,
|
||||
description="List VnfLcmOpOcc.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': VNF_LCM_OP_OCCS_PATH}
|
||||
]
|
||||
),
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('lcm_op_occ_show'),
|
||||
check_str=RULE_ANY,
|
||||
description="Show VnfLcmOpOcc.",
|
||||
operations=[
|
||||
{'method': 'GET',
|
||||
'path': VNF_LCM_OP_OCCS_ID_PATH}
|
||||
]
|
||||
),
|
||||
# NOTE: 'DELETE' is not defined in the specification. It is for test
|
||||
# use since it is convenient to be able to delete under development.
|
||||
# It is available when config parameter
|
||||
# v2_vnfm.test_enable_lcm_op_occ_delete set to True.
|
||||
policy.DocumentedRuleDefault(
|
||||
name=POLICY_NAME.format('lcm_op_occ_delete'),
|
||||
check_str=RULE_ANY,
|
||||
description="Delete VnfLcmOpOcc.",
|
||||
operations=[
|
||||
{'method': 'DELETE',
|
||||
'path': VNF_LCM_OP_OCCS_ID_PATH}
|
||||
]
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def list_rules():
|
||||
return rules
|
tacker/sol_refactored/api/router.py (new file, 54 lines)
@ -0,0 +1,54 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from tacker.sol_refactored.api.policies import vnflcm_v2 as vnflcm_policy_v2
|
||||
from tacker.sol_refactored.api import wsgi as sol_wsgi
|
||||
from tacker.sol_refactored.controller import vnflcm_v2
|
||||
from tacker.sol_refactored.controller import vnflcm_versions
|
||||
|
||||
|
||||
class VnflcmVersions(sol_wsgi.SolAPIRouter):
|
||||
|
||||
controller = sol_wsgi.SolResource(
|
||||
vnflcm_versions.VnfLcmVersionsController())
|
||||
route_list = [("/api_versions", {"GET": "index"})]
|
||||
|
||||
|
||||
class VnflcmAPIRouterV2(sol_wsgi.SolAPIRouter):
|
||||
|
||||
controller = sol_wsgi.SolResource(vnflcm_v2.VnfLcmControllerV2(),
|
||||
policy_name=vnflcm_policy_v2.POLICY_NAME)
|
||||
route_list = [
|
||||
("/vnf_instances", {"GET": "index", "POST": "create"}),
|
||||
("/vnf_instances/{id}",
|
||||
{"DELETE": "delete", "GET": "show", "PATCH": "update"}),
|
||||
("/vnf_instances/{id}/instantiate", {"POST": "instantiate"}),
|
||||
("/vnf_instances/{id}/heal", {"POST": "heal"}),
|
||||
("/vnf_instances/{id}/terminate", {"POST": "terminate"}),
|
||||
("/vnf_instances/{id}/scale", {"POST": "scale"}),
|
||||
("/api_versions", {"GET": "api_versions"}),
|
||||
("/subscriptions", {"GET": "subscription_list",
|
||||
"POST": "subscription_create"}),
|
||||
("/subscriptions/{id}", {"GET": "subscription_show",
|
||||
"DELETE": "subscription_delete"}),
|
||||
("/vnf_lcm_op_occs", {"GET": "lcm_op_occ_list"}),
|
||||
# NOTE: 'DELETE' is not defined in the specification. It is for test
|
||||
# use since it is convenient to be able to delete under development.
|
||||
# It is available when config parameter
|
||||
# v2_vnfm.test_enable_lcm_op_occ_delete set to True.
|
||||
("/vnf_lcm_op_occs/{id}", {"GET": "lcm_op_occ_show",
|
||||
"DELETE": "lcm_op_occ_delete"})
|
||||
]
|
tacker/sol_refactored/api/schemas/__init__.py (new empty file)
tacker/sol_refactored/api/schemas/common_types.py (new file, 244 lines)
@ -0,0 +1,244 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from tacker.api.validation import parameter_types
|
||||
|
||||
|
||||
# SOL013 7.2.2
|
||||
Identifier = {
|
||||
'type': 'string', 'minLength': 1, 'maxLength': 255
|
||||
}
|
||||
|
||||
# SOL003 4.4.2.2
|
||||
IdentifierInVnfd = {
|
||||
'type': 'string', 'minLength': 1, 'maxLength': 255
|
||||
}
|
||||
|
||||
# SOL003 4.4.2.2
|
||||
IdentifierInVim = {
|
||||
'type': 'string', 'minLength': 1, 'maxLength': 255
|
||||
}
|
||||
|
||||
# SOL003 4.4.2.2
|
||||
IdentifierInVnf = {
|
||||
'type': 'string', 'minLength': 1, 'maxLength': 255
|
||||
}
|
||||
|
||||
# SOL003 4.4.2.2
|
||||
IdentifierLocal = {
|
||||
'type': 'string', 'minLength': 1, 'maxLength': 255
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.7
|
||||
ResourceHandle = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vimConnectionId': Identifier,
|
||||
'resourceProviderId': Identifier,
|
||||
'resourceId': IdentifierInVim,
|
||||
'vimLevelResourceType': {'type': 'string', 'maxLength': 255},
|
||||
},
|
||||
'required': ['resourceId'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.6
|
||||
VimConnectionInfo = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vimId': Identifier,
|
||||
'vimType': {'type': 'string', 'minLength': 1, 'maxLength': 255},
|
||||
'interfaceInfo': parameter_types.keyvalue_pairs,
|
||||
'accessInfo': parameter_types.keyvalue_pairs,
|
||||
'extra': parameter_types.keyvalue_pairs,
|
||||
},
|
||||
'required': ['vimType'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.10c (inner)
|
||||
_IpAddresses = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'type': {'enum': ('IPV4', 'IPV6')},
|
||||
'fixedAddresses': {'type': 'array'},
|
||||
'numDynamicAddresses': parameter_types.positive_integer,
|
||||
'addressRange': {'type': 'object'},
|
||||
'subnetId': IdentifierInVim
|
||||
},
|
||||
'if': {'properties': {'type': {'const': 'IPV4'}}},
|
||||
'then': {
|
||||
'properties': {
|
||||
'fixedAddresses': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string', 'format': 'ipv4'}
|
||||
},
|
||||
'addressRange': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'minAddress': {'type': 'string', 'format': 'ipv4'},
|
||||
'maxAddress': {'type': 'string', 'format': 'ipv4'}
|
||||
},
|
||||
'required': ['minAddress', 'maxAddress'],
|
||||
'additionalProperties': True
|
||||
},
|
||||
}
|
||||
},
|
||||
'else': {
|
||||
'properties': {
|
||||
'fixedAddresses': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string', 'format': 'ipv6'}
|
||||
},
|
||||
'addressRange': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'minAddress': {'type': 'string', 'format': 'ipv6'},
|
||||
'maxAddress': {'type': 'string', 'format': 'ipv6'}
|
||||
},
|
||||
'required': ['minAddress', 'maxAddress'],
|
||||
'additionalProperties': True
|
||||
},
|
||||
}
|
||||
},
|
||||
'required': ['type'],
|
||||
'oneOf': [
|
||||
{'required': ['numDynamicAddresses']},
|
||||
{'required': ['fixedAddresses']},
|
||||
{'required': ['addressRange']},
|
||||
],
|
||||
'additionalProperties': True
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.10c
|
||||
IpOverEthernetAddressData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'macAddress': {'type': 'string', 'format': 'mac_address'},
|
||||
'segmentationId': {'type': 'string'},
|
||||
'ipAddresses': {
|
||||
'type': 'array',
|
||||
'items': _IpAddresses}
|
||||
},
|
||||
'anyOf': [
|
||||
{'required': ['macAddress']},
|
||||
{'required': ['ipAddresses']}
|
||||
],
|
||||
'additionalProperties': True
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.10b
|
||||
CpProtocolData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'layerProtocol': {
|
||||
'type': 'string',
|
||||
'enum': 'IP_OVER_ETHERNET'},
|
||||
'ipOverEthernet': IpOverEthernetAddressData,
|
||||
},
|
||||
'required': ['layerProtocol'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.10a
|
||||
VnfExtCpConfig = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'parentCpConfigId': IdentifierInVnf,
|
||||
'linkPortId': Identifier,
|
||||
'cpProtocolData': {
|
||||
'type': 'array',
|
||||
'items': CpProtocolData}
|
||||
},
|
||||
'additionalProperties': True
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.10
|
||||
VnfExtCpData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'cpdId': IdentifierInVnfd,
|
||||
'cpConfig': {
|
||||
'type': 'object',
|
||||
'minProperties': 1,
|
||||
'patternProperties': {
|
||||
'^.*$': VnfExtCpConfig
|
||||
}
|
||||
}
|
||||
},
|
||||
'required': ['cpdId', 'cpConfig'],
|
||||
'additionalProperties': True
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.14
|
||||
ExtLinkPortData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': Identifier,
|
||||
'resourceHandle': ResourceHandle,
|
||||
},
|
||||
'required': ['id', 'resourceHandle'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.11
|
||||
ExtVirtualLinkData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': Identifier,
|
||||
'vimConnectionId': Identifier,
|
||||
'resourceProviderId': Identifier,
|
||||
'resourceId': IdentifierInVim,
|
||||
'extCps': {
|
||||
'type': 'array',
|
||||
'minItems': 1,
|
||||
'items': VnfExtCpData},
|
||||
'extLinkPorts': {
|
||||
'type': 'array',
|
||||
'items': ExtLinkPortData}
|
||||
},
|
||||
'required': ['id', 'resourceId', 'extCps'],
|
||||
'additionalProperties': True
|
||||
}
|
||||
|
||||
# SOL003 5.5.3.18
|
||||
VnfLinkPortData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfLinkPortId': Identifier,
|
||||
'resourceHandle': ResourceHandle
|
||||
},
|
||||
'required': ['vnfLinkPortId', 'resourceHandle'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.12
|
||||
ExtManagedVirtualLinkData = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': Identifier,
|
||||
'vnfVirtualLinkDescId': IdentifierInVnfd,
|
||||
'vimConnectionId': Identifier,
|
||||
'resourceProviderId': Identifier,
|
||||
'resourceId': IdentifierInVim,
|
||||
'vnfLinkPort': {
|
||||
'type': 'array',
|
||||
'items': VnfLinkPortData},
|
||||
'extManagedMultisiteVirtualLinkId': Identifier
|
||||
},
|
||||
'required': ['id', 'vnfVirtualLinkDescId', 'resourceId'],
|
||||
'additionalProperties': True,
|
||||
}
|
tacker/sol_refactored/api/schemas/vnflcm_v2.py (new file, 252 lines)
@ -0,0 +1,252 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from tacker.api.validation import parameter_types
|
||||
|
||||
from tacker.sol_refactored.api.schemas import common_types
|
||||
|
||||
|
||||
# SOL003 5.5.2.3
|
||||
CreateVnfRequest_V200 = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfdId': common_types.Identifier,
|
||||
'vnfInstanceName': {'type': 'string', 'maxLength': 255},
|
||||
'vnfInstanceDescription': {'type': 'string', 'maxLength': 1024},
|
||||
'metadata': parameter_types.keyvalue_pairs,
|
||||
},
|
||||
'required': ['vnfdId'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 5.5.2.4
|
||||
InstantiateVnfRequest_V200 = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'flavourId': common_types.IdentifierInVnfd,
|
||||
'instantiationLevelId': common_types.IdentifierInVnfd,
|
||||
'extVirtualLinks': {
|
||||
'type': 'array',
|
||||
'items': common_types.ExtVirtualLinkData},
|
||||
'extManagedVirtualLinks': {
|
||||
'type': 'array',
|
||||
'items': common_types.ExtManagedVirtualLinkData},
|
||||
'vimConnectionInfo': {
|
||||
'type': 'object',
|
||||
'patternProperties': {
|
||||
'^.*$': common_types.VimConnectionInfo
|
||||
},
|
||||
},
|
||||
'localizationLanguage': {'type': 'string', 'maxLength': 255},
|
||||
'additionalParams': parameter_types.keyvalue_pairs,
|
||||
'extensions': parameter_types.keyvalue_pairs,
|
||||
'vnfConfigurableProperties': parameter_types.keyvalue_pairs
|
||||
},
|
||||
'required': ['flavourId'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 5.5.2.8
|
||||
TerminateVnfRequest_V200 = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'terminationType': {
|
||||
'type': 'string',
|
||||
'enum': [
|
||||
'FORCEFUL',
|
||||
'GRACEFUL']
|
||||
},
|
||||
'gracefulTerminationTimeout': {
|
||||
'type': 'integer', 'minimum': 1
|
||||
},
|
||||
'additionalParams': parameter_types.keyvalue_pairs,
|
||||
},
|
||||
'required': ['terminationType'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL013 8.3.4
|
||||
_SubscriptionAuthentication = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'authType': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': [
|
||||
'BASIC',
|
||||
'OAUTH2_CLIENT_CREDENTIALS',
|
||||
'TLS_CERT']
|
||||
}
|
||||
},
|
||||
'paramsBasic': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'userName': {'type': 'string'},
|
||||
'password': {'type': 'string'}
|
||||
},
|
||||
# NOTE: must be specified since the way to specify them out of
|
||||
# band is not supported.
|
||||
'required': ['userName', 'password']
|
||||
},
|
||||
'paramsOauth2ClientCredentials': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'clientId': {'type': 'string'},
|
||||
'clientPassword': {'type': 'string'},
|
||||
'tokenEndpoint': {'type': 'string'}
|
||||
},
|
||||
# NOTE: must be specified since the way to specify them out of
|
||||
# band is not supported.
|
||||
'required': ['clientId', 'clientPassword', 'tokenEndpoint']
|
||||
}
|
||||
},
|
||||
'required': ['authType'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.5 inner
|
||||
_VnfProductVersions = {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'objects',
|
||||
'properties': {
|
||||
'vnfSoftwareVersion': {'type': 'string'},
|
||||
'vnfdVersions': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'}
|
||||
}
|
||||
},
|
||||
'required': ['vnfSoftwareVersion'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.5 inner
|
||||
_VnfProducts = {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfProductName': {'type': 'string'},
|
||||
'versions': _VnfProductVersions
|
||||
},
|
||||
'required': ['vnfProductName'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.5 inner
|
||||
_VnfProductsFromProviders = {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfProvider': {'type': 'string'},
|
||||
'vnfProducts': _VnfProducts
|
||||
}
|
||||
},
|
||||
'required': ['vnfProvider'],
|
||||
'additionalProperties': True,
|
||||
}
|
||||
}
|
||||
|
||||
# SOL003 4.4.1.5
|
||||
_VnfInstanceSubscriptionFilter = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfdIds': {
|
||||
'type': 'array',
|
||||
'items': common_types.Identifier
|
||||
},
|
||||
'vnfProductsFromProviders': _VnfProductsFromProviders,
|
||||
'vnfInstanceIds': {
|
||||
'type': 'array',
|
||||
'items': common_types.Identifier
|
||||
},
|
||||
'vnfInstanceNames': {
|
||||
'type': 'array',
|
||||
'items': {'type': 'string'}
|
||||
}
|
||||
},
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 5.5.3.12
|
||||
_LifecycleChangeNotificationsFilter = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'vnfInstanceSubscriptionFilter': _VnfInstanceSubscriptionFilter,
|
||||
'notificationTypes': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': [
|
||||
'VnfLcmOperationOccurrenceNotification',
|
||||
'VnfIdentifierCreationNotification',
|
||||
'VnfIdentifierDeletionNotification']
|
||||
}
|
||||
},
|
||||
'operationTypes': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': [
|
||||
'INSTANTIATE',
|
||||
'SCALE',
|
||||
'SCALE_TO_LEVEL',
|
||||
'CHANGE_FLAVOUR',
|
||||
'TERMINATE',
|
||||
'HEAL',
|
||||
'OPERATE',
|
||||
'CHANGE_EXT_CONN',
|
||||
'MODIFY_INFO']
|
||||
}
|
||||
},
|
||||
'operationStates': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
'enum': [
|
||||
'STARTING',
|
||||
'PROCESSING',
|
||||
'COMPLETED',
|
||||
'FAILED_TEMP',
|
||||
'FAILED',
|
||||
'ROLLING_BACK',
|
||||
'ROLLED_BACK']
|
||||
}
|
||||
}
|
||||
},
|
||||
'additionalProperties': True,
|
||||
}
|
||||
|
||||
# SOL003 5.5.2.15
|
||||
LccnSubscriptionRequest_V200 = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'filter': _LifecycleChangeNotificationsFilter,
|
||||
'callbackUri': {'type': 'string', 'maxLength': 255},
|
||||
'authentication': _SubscriptionAuthentication,
|
||||
'verbosity': {
|
||||
'type': 'string',
|
||||
'enum': ['FULL', 'SHORT']
|
||||
}
|
||||
},
|
||||
'required': ['callbackUri'],
|
||||
'additionalProperties': True,
|
||||
}
|
tacker/sol_refactored/api/validator.py (new file, 49 lines)
@@ -0,0 +1,49 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import functools

from tacker.api.validation import validators
from tacker.common import exceptions as tacker_ex

from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex


class SolSchemaValidator(validators._SchemaValidator):
    def validate(self, *args, **kwargs):
        try:
            super(SolSchemaValidator, self).validate(*args, **kwargs)
        except tacker_ex.ValidationError as ex:
            raise sol_ex.SolValidationError(detail=str(ex))


def schema(request_body_schema, min_version, max_version=None):

    def add_validator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            ver = kwargs['request'].context.api_version
            min_ver = api_version.APIVersion(min_version)
            max_ver = api_version.APIVersion(max_version)
            if ver.matches(min_ver, max_ver):
                schema_validator = SolSchemaValidator(request_body_schema)
                schema_validator.validate(kwargs['body'])

            return func(*args, **kwargs)
        return wrapper

    return add_validator
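A short illustrative sketch (not from this patch) of how a v2 controller method would combine this decorator with the request schemas added above; the controller class and method names are placeholders, and the real controller lives under tacker/sol_refactored/controller/ and is not shown in this view:

# Illustrative placeholder controller; names are not from this patch.
from tacker.sol_refactored.api.schemas import vnflcm_v2 as schema
from tacker.sol_refactored.api import validator
from tacker.sol_refactored.api import wsgi as sol_wsgi


class SampleControllerV2(sol_wsgi.SolAPIController):

    # The body is validated against the 2.0.0 schema when the request's
    # Version header (kept on request.context.api_version) is >= 2.0.0.
    @validator.schema(schema.CreateVnfRequest_V200, '2.0.0')
    def create(self, request, body):
        ...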
tacker/sol_refactored/api/wsgi.py (new file, 180 lines)
@ -0,0 +1,180 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import routes
|
||||
import webob
|
||||
|
||||
import oslo_i18n as i18n
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.common import exceptions as common_ex
|
||||
from tacker import wsgi
|
||||
|
||||
from tacker.sol_refactored.api import api_version
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SolResponse(object):
|
||||
|
||||
# SOL013 4.2.3 Response header field
|
||||
allowed_headers = ['version', 'location', 'content_type',
|
||||
'www_authenticate', 'accept_ranges', 'content_range',
|
||||
'retry_after', 'link']
|
||||
|
||||
def __init__(self, status, body, **kwargs):
|
||||
self.status = status
|
||||
self.body = body
|
||||
self.headers = {}
|
||||
for hdr in self.allowed_headers:
|
||||
if hdr in kwargs:
|
||||
self.headers[hdr] = kwargs[hdr]
|
||||
self.headers.setdefault('version', api_version.CURRENT_VERSION)
|
||||
self.headers.setdefault('accept-ranges', 'none')
|
||||
|
||||
def serialize(self, request, content_type):
|
||||
self.headers.setdefault('content_type', content_type)
|
||||
content_type = self.headers['content_type']
|
||||
if self.body is None:
|
||||
body = None
|
||||
elif content_type == 'text/plain':
|
||||
body = self.body
|
||||
elif content_type == 'application/zip':
|
||||
body = self.body
|
||||
else: # 'application/json'
|
||||
serializer = wsgi.JSONDictSerializer()
|
||||
body = serializer.serialize(self.body)
|
||||
if len(body) > config.CONF.v2_vnfm.max_content_length:
|
||||
raise sol_ex.ResponseTooBig(
|
||||
size=config.CONF.v2_vnfm.max_content_length)
|
||||
response = webob.Response(body=body)
|
||||
response.status_int = self.status
|
||||
for hdr, val in self.headers.items():
|
||||
response.headers[hdr.replace('_', '-')] = val
|
||||
return response
|
||||
|
||||
|
||||
class SolErrorResponse(SolResponse):
|
||||
|
||||
def __init__(self, ex, req):
|
||||
user_locale = req.best_match_language()
|
||||
problem_details = {}
|
||||
if isinstance(ex, sol_ex.SolException):
|
||||
problem_details = ex.make_problem_details()
|
||||
# translate detail
|
||||
detail = i18n.translate(problem_details['detail'], user_locale)
|
||||
problem_details['detail'] = detail
|
||||
elif isinstance(ex, (common_ex.TackerException,
|
||||
webob.exc.HTTPException)):
|
||||
LOG.warning("legacy Exception used. Use SolException instead.")
|
||||
problem_details['status'] = ex.code
|
||||
problem_details['detail'] = str(ex)
|
||||
else:
|
||||
# program bug. it occurs only under development.
|
||||
LOG.exception("Unknown error")
|
||||
problem_details['status'] = 500
|
||||
problem_details['detail'] = str(ex)
|
||||
|
||||
super(SolErrorResponse, self).__init__(problem_details['status'],
|
||||
problem_details)
|
||||
|
||||
|
||||
class SolResource(wsgi.Application):
|
||||
|
||||
def __init__(self, controller, policy_name=None):
|
||||
self.controller = controller
|
||||
self.policy_name = policy_name
|
||||
self.deserializer = wsgi.RequestDeserializer()
|
||||
|
||||
@webob.dec.wsgify(RequestClass=wsgi.Request)
|
||||
def __call__(self, request):
|
||||
LOG.info("%(method)s %(url)s", {"method": request.method,
|
||||
"url": request.url})
|
||||
try:
|
||||
action, args, accept = self.deserializer.deserialize(request)
|
||||
self.check_api_version(request)
|
||||
self.check_policy(request, action)
|
||||
result = self.dispatch(request, action, args)
|
||||
response = result.serialize(request, accept)
|
||||
except Exception as ex:
|
||||
result = SolErrorResponse(ex, request)
|
||||
try:
|
||||
response = result.serialize(request,
|
||||
'application/problem+json')
|
||||
except Exception:
|
||||
LOG.exception("Unknown error")
|
||||
return webob.exc.HTTPBadRequest(explanation="Unknown error")
|
||||
|
||||
LOG.info("%(url)s returned with HTTP %(status)d",
|
||||
{"url": request.url, "status": response.status_int})
|
||||
|
||||
return response
|
||||
|
||||
def check_api_version(self, request):
|
||||
# check and set api_version
|
||||
ver = request.headers.get("Version")
|
||||
if ver is None:
|
||||
LOG.info("Version missing")
|
||||
raise sol_ex.APIVersionMissing()
|
||||
request.context.api_version = api_version.APIVersion(ver)
|
||||
|
||||
def check_policy(self, request, action):
|
||||
if self.policy_name is None:
|
||||
return
|
||||
if action == 'reject':
|
||||
return
|
||||
request.context.can(self.policy_name.format(action))
|
||||
|
||||
def dispatch(self, request, action, action_args):
|
||||
controller_method = getattr(self.controller, action)
|
||||
return controller_method(request=request, **action_args)
|
||||
|
||||
|
||||
class SolAPIRouter(wsgi.Router):
|
||||
|
||||
controller = None
|
||||
route_list = {}
|
||||
|
||||
def __init__(self):
|
||||
super(SolAPIRouter, self).__init__(routes.Mapper())
|
||||
|
||||
def _setup_routes(self, mapper):
|
||||
for path, methods in self.route_list:
|
||||
self._setup_route(mapper, path, methods)
|
||||
|
||||
def _setup_route(self, mapper, path, methods):
|
||||
|
||||
for method, action in methods.items():
|
||||
mapper.connect(path,
|
||||
controller=self.controller,
|
||||
action=action,
|
||||
conditions={'method': [method]})
|
||||
|
||||
all_methods = ['HEAD', 'GET', 'POST', 'PUT', 'PATCH', 'DELETE']
|
||||
missing_methods = [m for m in all_methods if m not in methods]
|
||||
if missing_methods:
|
||||
mapper.connect(path,
|
||||
controller=self.controller,
|
||||
action='reject',
|
||||
conditions={'method': missing_methods})
|
||||
|
||||
|
||||
class SolAPIController(object):
|
||||
|
||||
def reject(self, request, **kwargs):
|
||||
raise sol_ex.MethodNotAllowed(method=request.method)
|
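To illustrate how the pieces above fit together, a minimal sketch (not from this patch) of a controller action returning a SolResponse; SolResource.dispatch() calls serialize() on the returned object, which also adds the SOL013 response headers. The class and method names here are placeholders.

# Illustrative only; class and method names are placeholders.
from tacker.sol_refactored.api import wsgi as sol_wsgi


class SampleVersionsController(sol_wsgi.SolAPIController):

    def index(self, request):
        body = {'uriPrefix': '/vnflcm/v2',
                'apiVersions': [{'version': '2.0.0', 'isDeprecated': False}]}
        # serialize() (invoked by SolResource) sets the 'Version' and
        # 'Accept-Ranges' headers defined in SolResponse above.
        return sol_wsgi.SolResponse(200, body)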
tacker/sol_refactored/common/config.py (new file, 79 lines)
@ -0,0 +1,79 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
VNFM_OPTS = [
|
||||
cfg.StrOpt('endpoint',
|
||||
default='http://127.0.0.1:9890',
|
||||
help='Endpoint of VNFM (self).'),
|
||||
cfg.IntOpt('default_graceful_termination_timeout',
|
||||
default=10,
|
||||
help='Default timeout value (second) of GRACEFUL termination.'),
|
||||
cfg.IntOpt('max_content_length',
|
||||
default=1000000,
|
||||
help='Max content length for list APIs.'),
|
||||
cfg.IntOpt('openstack_vim_stack_create_timeout',
|
||||
default=20,
|
||||
help='Timeout (in minuts) of heat stack creation.'),
|
||||
# NOTE: This is for test use since it is convenient to be able to delete
|
||||
# under development.
|
||||
cfg.BoolOpt('test_enable_lcm_op_occ_delete',
|
||||
default=False,
|
||||
help=_('Enable to delete LCM operation occurrence if True. '
|
||||
'This is intended to use under development.')),
|
||||
]
|
||||
|
||||
CONF.register_opts(VNFM_OPTS, 'v2_vnfm')
|
||||
|
||||
NFVO_OPTS = [
|
||||
cfg.BoolOpt('use_external_nfvo',
|
||||
default=False,
|
||||
help=_('Use external NFVO if True, '
|
||||
'use internal NFVO in tacker if False')),
|
||||
cfg.StrOpt('grant_api_version',
|
||||
default='1.4.0', # SOL003 v3.3.1 9.1a
|
||||
help='Grant api_version of NFVO.'),
|
||||
cfg.StrOpt('vnfpkgm_api_version',
|
||||
default='2.1.0', # SOL003 v3.3.1 10.1a
|
||||
help='Vnf package management api_version of NFVO.'),
|
||||
# The following four parameters are for external NFVO.
|
||||
# Must be set when using external NFVO.
|
||||
# NOTE: It is assumed the VNFM communicates only one NFVO. That is
|
||||
# the same NFVO provides both the grant and vnf package management APIs.
|
||||
cfg.StrOpt('endpoint',
|
||||
default='',
|
||||
help='Endpoint of external NFVO.'),
|
||||
cfg.StrOpt('token_endpoint',
|
||||
default='',
|
||||
help='Token endpoint for OAuth2.0 authentication.'),
|
||||
cfg.StrOpt('client_id',
|
||||
default='',
|
||||
help='Client id used by OAuth2.0 authentication.'),
|
||||
cfg.StrOpt('client_password',
|
||||
default='',
|
||||
help='Client password used by OAuth2.0 authentication.'),
|
||||
cfg.BoolOpt('test_callback_uri',
|
||||
default=True,
|
||||
help='Check to get notification from callback Uri.'),
|
||||
cfg.ListOpt('test_grant_zone_list',
|
||||
default=["nova"],
|
||||
help='Zones used for test which returned in Grant response.')
|
||||
]
|
||||
|
||||
CONF.register_opts(NFVO_OPTS, 'v2_nfvo')
|
tacker/sol_refactored/common/coordinate.py (new file, 69 lines)
@ -0,0 +1,69 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.common import coordination
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# NOTE: It is used to prevent operation for the same vnf instance
|
||||
# from being processed at the same time. It can be applied between
|
||||
# threads of a process and different processes (e.g. tacker-server
|
||||
# and tacker-conductor) on a same host.
|
||||
# Note that race condition of very short time is not considered.
|
||||
|
||||
def lock_vnf_instance(inst_arg, delay=False):
|
||||
# NOTE: tacker-server issues RPC call to tacker-conductor
|
||||
# (just) before the lock is released. 'delay' is for tacker-conductor
|
||||
# to be able to wait if it receives the RPC call before tacker-server
|
||||
# releases the lock.
|
||||
|
||||
def operation_lock(func):
|
||||
@functools.wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
coord = coordination.COORDINATOR
|
||||
# ensure coordination start
|
||||
# NOTE: it is noop if already started.
|
||||
coord.start()
|
||||
|
||||
sig = inspect.signature(func)
|
||||
call_args = sig.bind(*args, **kwargs).arguments
|
||||
inst_id = inst_arg.format(**call_args)
|
||||
lock = coord.get_lock(inst_id)
|
||||
|
||||
blocking = False if not delay else 10
|
||||
# NOTE: 'with lock' is not used since it can't handle
|
||||
# lock failed exception well.
|
||||
if not lock.acquire(blocking=blocking):
|
||||
LOG.debug("Locking vnfInstance %s failed.", inst_id)
|
||||
raise sol_ex.OtherOperationInProgress(inst_id=inst_id)
|
||||
|
||||
try:
|
||||
LOG.debug("vnfInstance %s locked.", inst_id)
|
||||
return func(*args, **kwargs)
|
||||
finally:
|
||||
lock.release()
|
||||
|
||||
return wrapper
|
||||
|
||||
return operation_lock
|
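A usage sketch of the decorator follows; the class and method names are illustrative, while the real usage in this patch is the one on ConductorV2._start_lcm_op with '{lcmocc.vnfInstanceId}':

from tacker.sol_refactored.common import coordinate

class SampleLcmController(object):

    # '{inst_id}' must refer to a parameter of the decorated method;
    # the formatted value becomes the lock name.
    @coordinate.lock_vnf_instance('{inst_id}')
    def instantiate(self, context, inst_id, body):
        # Only one LCM operation per vnf instance runs here at a time;
        # a concurrent caller gets OtherOperationInProgress (409).
        pass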
212
tacker/sol_refactored/common/exceptions.py
Normal file
@@ -0,0 +1,212 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from tacker._i18n import _
||||
|
||||
class SolException(Exception):
|
||||
"""Exception for SOL ProblemDetails
|
||||
|
||||
Generally status, title and message should be defined in derived class.
|
||||
detail is constructed from message and kwargs.
|
||||
|
||||
Attributes in ProblemDetails can be specified in kwargs of object
|
||||
initialization. Use `sol_*` (e.g. `sol_instance`) to avoid conflicts
|
||||
with kwargs.
|
||||
"""
|
||||
|
||||
status = 500
|
||||
title = None
|
||||
message = 'Internal Server Error'
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.status = kwargs.pop('sol_status', self.status)
|
||||
self.title = kwargs.pop('sol_title', self.title)
|
||||
self.type = kwargs.pop('sol_type', None)
|
||||
self.instance = kwargs.pop('sol_instance', None)
|
||||
self.detail = kwargs.pop('sol_detail', self.message % kwargs)
|
||||
super().__init__(self.detail)
|
||||
|
||||
def make_problem_details(self):
|
||||
res = {'status': self.status,
|
||||
'detail': self.detail}
|
||||
if self.title is not None:
|
||||
res['title'] = self.title
|
||||
if self.type is not None:
|
||||
res['type'] = self.type
|
||||
if self.instance is not None:
|
||||
res['instance'] = self.instance
|
||||
|
||||
return res
|
||||
|
||||
|
||||
class SolHttpError400(SolException):
|
||||
status = 400
|
||||
title = 'Bad Request'
|
||||
|
||||
|
||||
class SolHttpError403(SolException):
|
||||
status = 403
|
||||
title = 'Forbidden'
|
||||
|
||||
|
||||
class SolHttpError404(SolException):
|
||||
status = 404
|
||||
title = 'Not Found'
|
||||
|
||||
|
||||
class SolHttpError405(SolException):
|
||||
status = 405
|
||||
title = 'Method Not Allowed'
|
||||
|
||||
|
||||
class SolHttpError406(SolException):
|
||||
status = 406
|
||||
title = 'Not Acceptable'
|
||||
|
||||
|
||||
class SolHttpError409(SolException):
|
||||
status = 409
|
||||
title = 'Conflict'
|
||||
|
||||
|
||||
class SolHttpError422(SolException):
|
||||
status = 422
|
||||
title = 'Unprocessable Entity'
|
||||
|
||||
|
||||
class MethodNotAllowed(SolHttpError405):
|
||||
message = _("Method %(method)s is not supported.")
|
||||
|
||||
|
||||
class SolValidationError(SolHttpError400):
|
||||
message = _("%(detail)s")
|
||||
|
||||
|
||||
class InvalidAPIVersionString(SolHttpError400):
|
||||
message = _("Version String %(version)s is of invalid format. Must "
|
||||
"be of format Major.Minor.Patch.")
|
||||
|
||||
|
||||
class APIVersionMissing(SolHttpError400):
|
||||
message = _("'Version' HTTP header missing.")
|
||||
|
||||
|
||||
class APIVersionNotSupported(SolHttpError406):
|
||||
message = _("Version %(version)s not supported.")
|
||||
|
||||
|
||||
class VnfdIdNotEnabled(SolHttpError422):
|
||||
message = _("VnfId %(vnfd_id)s not ENABLED.")
|
||||
|
||||
|
||||
class VnfInstanceNotFound(SolHttpError404):
|
||||
message = _("VnfInstance %(inst_id)s not found.")
|
||||
|
||||
|
||||
class VnfInstanceIsInstantiated(SolHttpError409):
|
||||
message = _("VnfInstance %(inst_id)s is instantiated.")
|
||||
|
||||
|
||||
class VnfInstanceIsNotInstantiated(SolHttpError409):
|
||||
message = _("VnfInstance %(inst_id)s isn't instantiated.")
|
||||
|
||||
|
||||
class SubscriptionNotFound(SolHttpError404):
|
||||
message = _("Subscription %(subsc_id)s not found.")
|
||||
|
||||
|
||||
class VnfLcmOpOccNotFound(SolHttpError404):
|
||||
message = _("VnfLcmOpOcc %(lcmocc_id)s not found.")
|
||||
|
||||
|
||||
class VnfdIdNotFound(SolHttpError422):
|
||||
message = _("VnfPackage of vnfdId %(vnfd_id)s is not found or "
|
||||
"not operational.")
|
||||
|
||||
|
||||
class FlavourIdNotFound(SolHttpError400):
|
||||
message = _("FlavourId %(flavour_id)s not found in the vnfd.")
|
||||
|
||||
|
||||
class NoVimConnectionInfo(SolHttpError422):
|
||||
message = _("No VimConnectionInfo set to the VnfInstance.")
|
||||
|
||||
|
||||
class InvalidVnfdFormat(SolHttpError400):
|
||||
message = _("Vnfd is unexpected format.")
|
||||
|
||||
|
||||
class StackOperationFailed(SolHttpError422):
|
||||
# title and detail are set in the code from stack_status_reason
|
||||
pass
|
||||
|
||||
|
||||
class MgmtDriverExecutionFailed(SolHttpError422):
|
||||
title = 'Mgmt driver execution failed'
|
||||
# detail set in the code
|
||||
|
||||
|
||||
class BaseHOTNotDefined(SolHttpError400):
|
||||
message = _("BaseHOT is not defined.")
|
||||
|
||||
|
||||
class UserdataMissing(SolHttpError400):
|
||||
message = _("'lcm-operation-user-data' or "
|
||||
"'lcm-operation-user-data-class' missing.")
|
||||
|
||||
|
||||
class UserdataExecutionFailed(SolHttpError422):
|
||||
title = 'Userdata execution failed'
|
||||
# detail set in the code
|
||||
|
||||
|
||||
class TestNotificationFailed(SolHttpError422):
|
||||
message = _("Can't get from notification callback Uri.")
|
||||
|
||||
|
||||
class VimNotFound(SolHttpError404):
|
||||
message = _("VIM %(vim_id)s not found.")
|
||||
|
||||
|
||||
class OtherOperationInProgress(SolHttpError409):
|
||||
message = _("Other LCM operation of vnfInstance %(inst_id)s "
|
||||
"is in progress.")
|
||||
|
||||
|
||||
class UserDataClassNotImplemented(SolHttpError400):
|
||||
message = _("Userdata class not implemented.")
|
||||
|
||||
|
||||
class InvalidAttributeFilter(SolHttpError400):
|
||||
message = _("Attribute filter expression is invalid.")
|
||||
|
||||
|
||||
class InvalidAttributeSelector(SolHttpError400):
|
||||
message = _("Attribute selector expression is invalid.")
|
||||
|
||||
|
||||
class InvalidSubscription(SolHttpError400):
|
||||
# detail set in the code
|
||||
pass
|
||||
|
||||
|
||||
class ResponseTooBig(SolHttpError400):
|
||||
title = 'Response too big'
|
||||
message = _("Content length of the response is larger "
|
||||
"than %(size)d bytes.")
|
||||
|
||||
|
||||
class LocalNfvoGrantFailed(SolHttpError403):
|
||||
title = 'Grant failed'
|
||||
# detail set in the code
|
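An illustrative sketch of how these exceptions are meant to be used; the helper function is made up, but the resulting ProblemDetails follows directly from the class definitions above:

from tacker.sol_refactored.common import exceptions as sol_ex

def find_instance(insts, inst_id):
    # 'insts' is a plain dict standing in for the real DB lookup.
    if inst_id not in insts:
        raise sol_ex.VnfInstanceNotFound(inst_id=inst_id)
    return insts[inst_id]

try:
    find_instance({}, 'inst-1')
except sol_ex.SolException as ex:
    body = ex.make_problem_details()
    # {'status': 404, 'title': 'Not Found',
    #  'detail': 'VnfInstance inst-1 not found.'}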
237
tacker/sol_refactored/common/http_client.py
Normal file
@@ -0,0 +1,237 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import abc
|
||||
|
||||
from keystoneauth1 import adapter
|
||||
from keystoneauth1 import http_basic
|
||||
from keystoneauth1.identity import v3
|
||||
from keystoneauth1 import noauth
|
||||
from keystoneauth1 import plugin
|
||||
from keystoneauth1 import session
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.objects import base
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HttpClient(object):
|
||||
|
||||
def __init__(self, auth_handle, version=None,
|
||||
service_type='nfv-orchestration', connect_retries=None,
|
||||
timeout=None):
|
||||
self.auth_handle = auth_handle
|
||||
self.version = version
|
||||
self.service_type = service_type
|
||||
# NOTE: these parameters could be set from e.g. configuration
|
||||
# if an HttpClient user wants to use them.
|
||||
self.connect_retries = connect_retries
|
||||
self.timeout = timeout
|
||||
|
||||
def do_request(self, url, method, context=None, expected_status=[],
|
||||
**kwargs):
|
||||
content_type = kwargs.pop('content_type', 'application/json')
|
||||
|
||||
headers = kwargs.setdefault('headers', {})
|
||||
headers.setdefault('Accept', content_type)
|
||||
|
||||
body = kwargs.pop('body', None)
|
||||
if body is not None:
|
||||
if isinstance(body, base.TackerObject):
|
||||
body = body.to_dict()
|
||||
if isinstance(body, dict):
|
||||
body = jsonutils.dumps(body)
|
||||
kwargs.setdefault('data', body)
|
||||
headers.setdefault('Content-Type', content_type)
|
||||
|
||||
version = kwargs.pop('version', None) or self.version
|
||||
if version is not None:
|
||||
headers.setdefault('Version', version)
|
||||
|
||||
if self.connect_retries is not None:
|
||||
kwargs.setdefault('connect_retries', self.connect_retries)
|
||||
if self.timeout is not None:
|
||||
kwargs.setdefault('timeout', self.timeout)
|
||||
|
||||
session = self.auth_handle.get_session(
|
||||
self.auth_handle.get_auth(context), self.service_type)
|
||||
resp = session.request(url, method, raise_exc=False, **kwargs)
|
||||
|
||||
resp_body = self._decode_body(resp)
|
||||
|
||||
if expected_status and resp.status_code not in expected_status:
|
||||
self.raise_sol_exception(resp, resp_body)
|
||||
|
||||
return resp, resp_body
|
||||
|
||||
def raise_sol_exception(self, resp, resp_body):
|
||||
content_type = resp.headers['Content-Type']
|
||||
kwargs = {'sol_status': resp.status_code}
|
||||
if content_type == 'application/problem+json':
|
||||
kwargs['sol_detail'] = resp_body['detail']
|
||||
else:
|
||||
kwargs['sol_detail'] = resp.text
|
||||
|
||||
raise sol_ex.SolException(**kwargs)
|
||||
|
||||
def _decode_body(self, resp):
|
||||
if resp.status_code == 204: # no content
|
||||
return
|
||||
content_type = resp.headers['Content-Type']
|
||||
if content_type == 'application/zip':
|
||||
return resp.content
|
||||
if content_type == 'text/plain':
|
||||
return resp.text
|
||||
if resp.text:
|
||||
return jsonutils.loads(resp.text)
|
||||
# otherwise return None
|
||||
|
||||
|
||||
class AuthHandle(metaclass=abc.ABCMeta):
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_auth(self, context=None):
|
||||
# returns keystoneauth1 authentication plugin object
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_session(self, auth, service_type):
|
||||
# returns keystoneauth1 session object
|
||||
pass
|
||||
|
||||
|
||||
class KeystoneTokenAuthHandle(AuthHandle):
|
||||
|
||||
def __init__(self, auth_url, context):
|
||||
self.auth_url = auth_url
|
||||
self.context = context
|
||||
|
||||
def get_auth(self, context):
|
||||
if context is None:
|
||||
context = self.context
|
||||
return v3.Token(auth_url=self.auth_url,
|
||||
token=context.auth_token,
|
||||
project_id=context.project_id,
|
||||
project_domain_id=context.project_domain_id)
|
||||
|
||||
def get_session(self, auth, service_type):
|
||||
_session = session.Session(auth=auth, verify=False)
|
||||
return adapter.Adapter(session=_session,
|
||||
service_type=service_type)
|
||||
|
||||
|
||||
class KeystonePasswordAuthHandle(AuthHandle):
|
||||
|
||||
def __init__(self, auth_url, username, password,
|
||||
project_name, user_domain_name, project_domain_name):
|
||||
self.auth_url = auth_url
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.project_name = project_name
|
||||
self.user_domain_name = user_domain_name
|
||||
self.project_domain_name = project_domain_name
|
||||
|
||||
def get_auth(self, context=None):
|
||||
return v3.Password(auth_url=self.auth_url,
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
project_name=self.project_name,
|
||||
user_domain_name=self.user_domain_name,
|
||||
project_domain_name=self.project_domain_name)
|
||||
|
||||
def get_session(self, auth, service_type):
|
||||
_session = session.Session(auth=auth, verify=False)
|
||||
return adapter.Adapter(session=_session,
|
||||
service_type=service_type)
|
||||
|
||||
|
||||
class BasicAuthHandle(AuthHandle):
|
||||
|
||||
def __init__(self, username, password):
|
||||
self.username = username
|
||||
self.password = password
|
||||
|
||||
def get_auth(self, context=None):
|
||||
return http_basic.HTTPBasicAuth(username=self.username,
|
||||
password=self.password)
|
||||
|
||||
def get_session(self, auth, service_type):
|
||||
return session.Session(auth=auth, verify=False)
|
||||
|
||||
|
||||
class NoAuthHandle(AuthHandle):
|
||||
|
||||
def __init__(self, endpoint=None):
|
||||
self.endpoint = endpoint
|
||||
|
||||
def get_auth(self, context=None):
|
||||
return noauth.NoAuth(endpoint=self.endpoint)
|
||||
|
||||
def get_session(self, auth, service_type):
|
||||
return session.Session(auth=auth, verify=False)
|
||||
|
||||
|
||||
class Oauth2AuthPlugin(plugin.FixedEndpointPlugin):
|
||||
|
||||
def __init__(self, endpoint, token_endpoint, client_id, client_password):
|
||||
super(Oauth2AuthPlugin, self).__init__(endpoint)
|
||||
self.token_endpoint = token_endpoint
|
||||
self.client_id = client_id
|
||||
self.client_password = client_password
|
||||
|
||||
def get_token(self, session, **kwargs):
|
||||
auth = BasicAuthHandle(self.client_id,
|
||||
self.client_password)
|
||||
client = HttpClient(auth)
|
||||
|
||||
url = self.token_endpoint + '/token'
|
||||
data = {'grant_type': 'client_credentials'}
|
||||
|
||||
resp, resp_body = client.do_request(url, "POST",
|
||||
data=data, content_type='application/x-www-form-urlencoded')
|
||||
|
||||
if resp.status_code != 200:
|
||||
LOG.error("get OAuth2 token failed: %d" % resp.status_code)
|
||||
return
|
||||
|
||||
return resp_body['access_token']
|
||||
|
||||
def get_headers(self, session, **kwargs):
|
||||
token = self.get_token(session)
|
||||
if not token:
|
||||
return None
|
||||
auth = 'Bearer %s' % token
|
||||
return {'Authorization': auth}
|
||||
|
||||
|
||||
class OAuth2AuthHandle(AuthHandle):
|
||||
|
||||
def __init__(self, endpoint, token_endpoint, client_id, client_password):
|
||||
self.endpoint = endpoint
|
||||
self.token_endpoint = token_endpoint
|
||||
self.client_id = client_id
|
||||
self.client_password = client_password
|
||||
|
||||
def get_auth(self, context=None):
|
||||
return Oauth2AuthPlugin(self.endpoint, self.token_endpoint,
|
||||
self.client_id, self.client_password)
|
||||
|
||||
def get_session(self, auth, service_type):
|
||||
return session.Session(auth=auth, verify=False)
|
181
tacker/sol_refactored/common/lcm_op_occ_utils.py
Normal file
@@ -0,0 +1,181 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import subscription_utils as subsc_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.sol_refactored.objects.v2 import fields
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__) # not used at the moment
|
||||
|
||||
|
||||
def get_lcmocc(context, lcmocc_id):
|
||||
lcmocc = objects.VnfLcmOpOccV2.get_by_id(context, lcmocc_id)
|
||||
if lcmocc is None:
|
||||
raise sol_ex.VnfLcmOpOccNotFound(lcmocc_id=lcmocc_id)
|
||||
return lcmocc
|
||||
|
||||
|
||||
def get_lcmocc_all(context):
|
||||
return objects.VnfLcmOpOccV2.get_all(context)
|
||||
|
||||
|
||||
def lcmocc_href(lcmocc_id, endpoint):
|
||||
return "{}/v2/vnflcm/vnf_lcm_op_occs/{}".format(endpoint, lcmocc_id)
|
||||
|
||||
|
||||
def make_lcmocc_links(lcmocc, endpoint):
|
||||
links = objects.VnfLcmOpOccV2_Links()
|
||||
links.self = objects.Link(href=lcmocc_href(lcmocc.id, endpoint))
|
||||
links.vnfInstance = objects.Link(
|
||||
href=inst_utils.inst_href(lcmocc.vnfInstanceId, endpoint))
|
||||
# TODO(oda-g): add when implemented
|
||||
# links.grant
|
||||
# links.cancel
|
||||
# links.retry
|
||||
# links.rollback
|
||||
# links.fail
|
||||
# links.vnfSnapshot
|
||||
|
||||
return links
|
||||
|
||||
|
||||
def make_lcmocc_notif_data(subsc, lcmocc, endpoint):
|
||||
notif_data = objects.VnfLcmOperationOccurrenceNotificationV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
notificationType="VnfLcmOperationOccurrenceNotification",
|
||||
subscriptionId=subsc.id,
|
||||
timeStamp=datetime.utcnow(),
|
||||
operationState=lcmocc.operationState,
|
||||
vnfInstanceId=lcmocc.vnfInstanceId,
|
||||
operation=lcmocc.operation,
|
||||
isAutomaticInvocation=lcmocc.isAutomaticInvocation,
|
||||
verbosity=subsc.verbosity,
|
||||
vnfLcmOpOccId=lcmocc.id,
|
||||
_links=objects.LccnLinksV2(
|
||||
vnfInstance=objects.NotificationLink(
|
||||
href=inst_utils.inst_href(lcmocc.vnfInstanceId, endpoint)),
|
||||
subscription=objects.NotificationLink(
|
||||
href=subsc_utils.subsc_href(subsc.id, endpoint)),
|
||||
vnfLcmOpOcc=objects.NotificationLink(
|
||||
href=lcmocc_href(lcmocc.id, endpoint))
|
||||
)
|
||||
)
|
||||
|
||||
if lcmocc.operationState == fields.LcmOperationStateType.STARTING:
|
||||
notif_data.notificationStatus = 'START'
|
||||
else:
|
||||
notif_data.notificationStatus = 'RESULT'
|
||||
|
||||
if lcmocc.obj_attr_is_set('error'):
|
||||
notif_data.error = lcmocc.error
|
||||
|
||||
if notif_data.verbosity == fields.LcmOpOccNotificationVerbosityType.FULL:
|
||||
if lcmocc.obj_attr_is_set('resourceChanges'):
|
||||
attrs = ['affectedVnfcs',
|
||||
'affectedVirtualLinks',
|
||||
'affectedExtLinkPorts',
|
||||
'affectedVirtualStorages']
|
||||
for attr in attrs:
|
||||
if lcmocc.resourceChanges.obj_attr_is_set(attr):
|
||||
notif_data[attr] = lcmocc.resourceChanges[attr]
|
||||
attrs = ['changedInfo',
|
||||
'changedExtConnectivity',
|
||||
'modificationsTriggeredByVnfPkgChange']
|
||||
for attr in attrs:
|
||||
if lcmocc.obj_attr_is_set(attr):
|
||||
notif_data[attr] = lcmocc[attr]
|
||||
|
||||
return notif_data
|
||||
|
||||
|
||||
def _make_instantiate_lcmocc(lcmocc, inst, change_type):
|
||||
# make ResourceChanges of lcmocc from instantiatedVnfInfo.
|
||||
# NOTE: grant related info such as resourceDefinitionId, zoneId
|
||||
# and so on are not included in lcmocc since such info are not
|
||||
# included in instantiatedVnfInfo.
|
||||
|
||||
inst_info = inst.instantiatedVnfInfo
|
||||
|
||||
lcmocc_vncs = []
|
||||
if inst_info.obj_attr_is_set('vnfcResourceInfo'):
|
||||
for inst_vnc in inst_info.vnfcResourceInfo:
|
||||
lcmocc_vnc = objects.AffectedVnfcV2(
|
||||
id=inst_vnc.id,
|
||||
vduId=inst_vnc.vduId,
|
||||
changeType=change_type,
|
||||
computeResource=inst_vnc.computeResource
|
||||
)
|
||||
if inst_vnc.obj_attr_is_set('vnfcCpInfo'):
|
||||
cp_ids = [cp.id for cp in inst_vnc.vnfcCpInfo]
|
||||
lcmocc_vnc.affectedVnfcCpIds = cp_ids
|
||||
if inst_vnc.obj_attr_is_set('storageResourceIds'):
|
||||
str_ids = inst_vnc.storageResourceIds
|
||||
if change_type == 'ADDED':
|
||||
lcmocc_vnc.addedStorageResourceIds = str_ids
|
||||
else: # 'REMOVED'
|
||||
lcmocc_vnc.removedStorageResourceIds = str_ids
|
||||
lcmocc_vncs.append(lcmocc_vnc)
|
||||
|
||||
lcmocc_vls = []
|
||||
if inst_info.obj_attr_is_set('vnfVirtualLinkResourceInfo'):
|
||||
for inst_vl in inst_info.vnfVirtualLinkResourceInfo:
|
||||
lcmocc_vl = objects.AffectedVirtualLinkV2(
|
||||
id=inst_vl.id,
|
||||
vnfVirtualLinkDescId=inst_vl.vnfVirtualLinkDescId,
|
||||
changeType=change_type,
|
||||
networkResource=inst_vl.networkResource
|
||||
)
|
||||
if inst_vl.obj_attr_is_set('vnfLinkPorts'):
|
||||
port_ids = [port.id for port in inst_vl.vnfLinkPorts]
|
||||
lcmocc_vl.vnfLinkPortIds = port_ids
|
||||
lcmocc_vls.append(lcmocc_vl)
|
||||
|
||||
lcmocc_strs = []
|
||||
if inst_info.obj_attr_is_set('virtualStorageResourceInfo'):
|
||||
for inst_str in inst_info.virtualStorageResourceInfo:
|
||||
lcmocc_str = objects.AffectedVirtualStorageV2(
|
||||
id=inst_str.id,
|
||||
virtualStorageDescId=inst_str.virtualStorageDescId,
|
||||
changeType=change_type,
|
||||
storageResource=inst_str.storageResource
|
||||
)
|
||||
lcmocc_strs.append(lcmocc_str)
|
||||
|
||||
if lcmocc_vncs or lcmocc_vls or lcmocc_strs:
|
||||
change_info = objects.VnfLcmOpOccV2_ResourceChanges()
|
||||
if lcmocc_vncs:
|
||||
change_info.affectedVnfcs = lcmocc_vncs
|
||||
if lcmocc_vls:
|
||||
change_info.affectedVirtualLinks = lcmocc_vls
|
||||
if lcmocc_strs:
|
||||
change_info.affectedVirtualStorages = lcmocc_strs
|
||||
lcmocc.resourceChanges = change_info
|
||||
|
||||
|
||||
def make_instantiate_lcmocc(lcmocc, inst):
|
||||
_make_instantiate_lcmocc(lcmocc, inst, 'ADDED')
|
||||
|
||||
|
||||
def make_terminate_lcmocc(lcmocc, inst):
|
||||
_make_instantiate_lcmocc(lcmocc, inst, 'REMOVED')
|
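For illustration, the href helpers above simply compose SOL v2 URLs from the configured endpoint; the endpoint and id below are placeholders:

from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils

url = lcmocc_utils.lcmocc_href('c5215213-bc0f-4b19-93c9-c9e6c52657aa',
                               'http://127.0.0.1:9890')
# 'http://127.0.0.1:9890/v2/vnflcm/vnf_lcm_op_occs/c5215213-bc0f-4b19-93c9-c9e6c52657aa'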
269
tacker/sol_refactored/common/subscription_utils.py
Normal file
@@ -0,0 +1,269 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import threading
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import timeutils
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.sol_refactored.api import api_version
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import http_client
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
TEST_NOTIFICATION_TIMEOUT = 20 # seconds
|
||||
|
||||
|
||||
def get_subsc(context, subsc_id):
|
||||
subsc = objects.LccnSubscriptionV2.get_by_id(context, subsc_id)
|
||||
if subsc is None:
|
||||
raise sol_ex.SubscriptionNotFound(subsc_id=subsc_id)
|
||||
return subsc
|
||||
|
||||
|
||||
def get_subsc_all(context):
|
||||
return objects.LccnSubscriptionV2.get_all(context)
|
||||
|
||||
|
||||
def subsc_href(subsc_id, endpoint):
|
||||
return "{}/v2/vnflcm/subscriptions/{}".format(endpoint, subsc_id)
|
||||
|
||||
|
||||
def _get_notification_auth_handle(subsc):
|
||||
if not subsc.obj_attr_is_set('authentication'):
|
||||
return http_client.NoAuthHandle()
|
||||
elif subsc.authentication.obj_attr_is_set('paramsBasic'):
|
||||
param = subsc.authentication.paramsBasic
|
||||
return http_client.BasicAuthHandle(param.userName, param.password)
|
||||
elif subsc.authentication.obj_attr_is_set(
|
||||
'paramsOauth2ClientCredentials'):
|
||||
param = subsc.authentication.paramsOauth2ClientCredentials
|
||||
return http_client.OAuth2AuthHandle(None,
|
||||
param.tokenEndpoint, param.clientId, param.clientPassword)
|
||||
|
||||
# not reach here
|
||||
|
||||
|
||||
def async_call(func):
|
||||
def inner(*args, **kwargs):
|
||||
th = threading.Thread(target=func, args=args,
|
||||
kwargs=kwargs, daemon=True)
|
||||
th.start()
|
||||
return inner
|
||||
|
||||
|
||||
@async_call
|
||||
def send_notification(subsc, notif_data):
|
||||
auth_handle = _get_notification_auth_handle(subsc)
|
||||
client = http_client.HttpClient(auth_handle,
|
||||
version=api_version.CURRENT_VERSION)
|
||||
|
||||
url = subsc.callbackUri
|
||||
try:
|
||||
resp, body = client.do_request(url, "POST", body=notif_data)
|
||||
if resp.status_code != 204:
|
||||
LOG.error("send_notification failed: %d" % resp.status_code)
|
||||
except Exception:
|
||||
# it may occur if test_notification was not executed.
|
||||
LOG.exception("send_notification failed")
|
||||
|
||||
|
||||
def test_notification(subsc):
|
||||
auth_handle = _get_notification_auth_handle(subsc)
|
||||
client = http_client.HttpClient(auth_handle,
|
||||
version=api_version.CURRENT_VERSION,
|
||||
timeout=TEST_NOTIFICATION_TIMEOUT)
|
||||
|
||||
url = subsc.callbackUri
|
||||
try:
|
||||
resp, _ = client.do_request(url, "GET")
|
||||
if resp.status_code != 204:
|
||||
raise sol_ex.TestNotificationFailed()
|
||||
except Exception:
|
||||
# any sort of error is treated the same; avoid returning a 500 error.
|
||||
raise sol_ex.TestNotificationFailed()
|
||||
|
||||
|
||||
def match_version(version, inst):
|
||||
# - vnfSoftwareVersion 1
|
||||
# - vnfdVersions 0..N
|
||||
if version.vnfSoftwareVersion != inst.vnfSoftwareVersion:
|
||||
return False
|
||||
|
||||
if not version.obj_attr_is_set('vnfdVersions'):
|
||||
# OK, no more check necessary.
|
||||
return True
|
||||
|
||||
return inst.vnfdVersion in version.vnfdVersions
|
||||
|
||||
|
||||
def match_products_per_provider(products, inst):
|
||||
# - vnfProvider 1
|
||||
# - vnfProducts 0..N
|
||||
# - vnfProductName 1
|
||||
# - versions 0..N
|
||||
# - vnfSoftwareVersion 1
|
||||
# - vnfdVersions 0..N
|
||||
if products.vnfProvider != inst.vnfProvider:
|
||||
return False
|
||||
|
||||
if not products.obj_attr_is_set('vnfProducts'):
|
||||
# OK, no more check necessary.
|
||||
return True
|
||||
|
||||
for product in products.vnfProducts:
|
||||
if product.vnfProductName == inst.vnfProductName:
|
||||
if not product.obj_attr_is_set('versions'):
|
||||
# OK, no more check necessary.
|
||||
return True
|
||||
for ver in product.versions:
|
||||
if match_version(ver, inst):
|
||||
# OK, match.
|
||||
return True
|
||||
# no match
|
||||
return False
|
||||
|
||||
|
||||
def match_inst_subsc_filter(inst_filter, inst):
|
||||
# inst_filter: VnfInstanceSubscriptionFilter
|
||||
# - vnfdIds 0..N
|
||||
# - VnfProductsFromProviders 0..N
|
||||
# - vnfInstanceIds 0..N
|
||||
# - vnfInstanceNames 0..N
|
||||
if inst_filter.obj_attr_is_set('vnfdIds'):
|
||||
if inst.vnfdId not in inst_filter.vnfdIds:
|
||||
return False
|
||||
|
||||
if inst_filter.obj_attr_is_set('vnfProductsFromProviders'):
|
||||
products_providers = inst_filter.vnfProductsFromProviders
|
||||
match = False
|
||||
for products in products_providers:
|
||||
if match_products_per_provider(products, inst):
|
||||
match = True
|
||||
break
|
||||
if not match:
|
||||
# no match found
|
||||
return False
|
||||
|
||||
if inst_filter.obj_attr_is_set('vnfInstanceIds'):
|
||||
if inst.id not in inst_filter.vnfInstanceIds:
|
||||
return False
|
||||
|
||||
if inst_filter.obj_attr_is_set('vnfInstanceNames'):
|
||||
if inst.vnfInstanceName not in inst_filter.vnfInstanceNames:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_inst_create_subscs(context, inst):
|
||||
return get_matched_subscs(context, inst,
|
||||
'VnfIdentifierCreationNotification', None, None)
|
||||
|
||||
|
||||
def get_inst_delete_subscs(context, inst):
|
||||
return get_matched_subscs(context, inst,
|
||||
'VnfIdentifierDeletionNotification', None, None)
|
||||
|
||||
|
||||
def get_lcmocc_subscs(context, lcmocc, inst):
|
||||
return get_matched_subscs(context, inst,
|
||||
'VnfLcmOperationOccurrenceNotification',
|
||||
lcmocc.operation, lcmocc.operationState)
|
||||
|
||||
|
||||
def get_matched_subscs(context, inst, notif_type, op_type, op_status):
|
||||
subscs = []
|
||||
for subsc in get_subsc_all(context):
|
||||
# subsc: LccnSubscription
|
||||
|
||||
if not subsc.obj_attr_is_set('filter'):
|
||||
# no filter. the subscription matches everything.
|
||||
subscs.append(subsc)
|
||||
continue
|
||||
|
||||
# subsc.filter: LifecycleChangeNotificationsFilter
|
||||
# - vnfInstanceSubscriptionFilter 0..1
|
||||
# - notificationTypes 0..N
|
||||
# - operationTypes 0..N
|
||||
# - operationStatus 0..N
|
||||
if subsc.filter.obj_attr_is_set('vnfInstanceSubscriptionFilter'):
|
||||
inst_filter = subsc.filter.vnfInstanceSubscriptionFilter
|
||||
if not match_inst_subsc_filter(inst_filter, inst):
|
||||
continue
|
||||
|
||||
if subsc.filter.obj_attr_is_set('notificationTypes'):
|
||||
if notif_type not in subsc.filter.notificationTypes:
|
||||
continue
|
||||
|
||||
if (op_type is not None and
|
||||
subsc.filter.obj_attr_is_set('operationTypes')):
|
||||
if op_type not in subsc.filter.operationTypes:
|
||||
continue
|
||||
|
||||
if (op_status is not None and
|
||||
subsc.filter.obj_attr_is_set('operationStatus')):
|
||||
if op_status not in subsc.filter.operationStatus:
|
||||
continue
|
||||
|
||||
# OK, matched
|
||||
subscs.append(subsc)
|
||||
|
||||
return subscs
|
||||
|
||||
|
||||
def make_create_inst_notif_data(subsc, inst, endpoint):
|
||||
notif_data = objects.VnfIdentifierCreationNotificationV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
notificationType="VnfIdentifierCreationNotification",
|
||||
subscriptionId=subsc.id,
|
||||
timeStamp=timeutils.utcnow(),
|
||||
vnfInstanceId=inst.id,
|
||||
_links=objects.LccnLinksV2(
|
||||
vnfInstance=objects.NotificationLink(
|
||||
href=inst_utils.inst_href(inst.id, endpoint)),
|
||||
subscription=objects.NotificationLink(
|
||||
href=subsc_href(subsc.id, endpoint))
|
||||
)
|
||||
# vnfLcmOpOcc: is not necessary
|
||||
)
|
||||
return notif_data
|
||||
|
||||
|
||||
def make_delete_inst_notif_data(subsc, inst, endpoint):
|
||||
notif_data = objects.VnfIdentifierDeletionNotificationV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
notificationType="VnfIdentifierDeletionNotification",
|
||||
subscriptionId=subsc.id,
|
||||
timeStamp=timeutils.utcnow(),
|
||||
vnfInstanceId=inst.id,
|
||||
_links=objects.LccnLinksV2(
|
||||
vnfInstance=objects.NotificationLink(
|
||||
href=inst_utils.inst_href(inst.id, endpoint)),
|
||||
subscription=objects.NotificationLink(
|
||||
href=subsc_href(subsc.id, endpoint))
|
||||
)
|
||||
# vnfLcmOpOcc: is not necessary
|
||||
)
|
||||
return notif_data
|
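As the code above shows, the subscriber's callbackUri must answer 204 both to the test GET (see test_notification) and to the notification POST. A minimal stand-alone subscriber stub satisfying that contract could look like the following; it is purely illustrative and not part of the patch:

from wsgiref.simple_server import make_server

def callback_app(environ, start_response):
    if environ['REQUEST_METHOD'] == 'POST':
        length = int(environ.get('CONTENT_LENGTH') or 0)
        environ['wsgi.input'].read(length)   # the notification body (JSON)
    start_response('204 No Content', [])
    return [b'']

make_server('0.0.0.0', 9990, callback_app).serve_forever()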
79
tacker/sol_refactored/common/vim_utils.py
Normal file
@@ -0,0 +1,79 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.vnfm import vim_client
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_default_vim(context):
|
||||
try:
|
||||
vim = vim_client.VimClient().get_vim(context)
|
||||
return vim_to_conn_info(vim)
|
||||
except Exception as ex:
|
||||
LOG.debug("default VIM not found: %s", ex)
|
||||
|
||||
|
||||
def get_vim(context, vim_id):
|
||||
try:
|
||||
vim = vim_client.VimClient().get_vim(context, vim_id=vim_id)
|
||||
return vim_to_conn_info(vim)
|
||||
except Exception as ex:
|
||||
LOG.error("VIM %s not found: %s", vim_id, ex)
|
||||
raise sol_ex.VimNotFound(vim_id=vim_id)
|
||||
|
||||
|
||||
def vim_to_conn_info(vim):
|
||||
if vim['vim_type'] == "openstack":
|
||||
# see https://nfvwiki.etsi.org/index.php
|
||||
# ?title=ETSINFV.OPENSTACK_KEYSTONE.V_3
|
||||
region = None
|
||||
if vim.get('placement_attr', {}).get('regions'):
|
||||
region = vim['placement_attr']['regions'][0]
|
||||
|
||||
vim_auth = vim['vim_auth']
|
||||
access_info = {
|
||||
'username': vim_auth['username'],
|
||||
'password': vim_auth['password'],
|
||||
'region': region,
|
||||
'project': vim_auth['project_name'],
|
||||
'projectDomain': vim_auth['project_domain_name'],
|
||||
'userDomain': vim_auth['user_domain_name']
|
||||
}
|
||||
interface_info = {
|
||||
'endpoint': vim_auth['auth_url'],
|
||||
# NOTE: certificate verification is not supported at the moment.
|
||||
# TODO(oda-g): certificate support if required.
|
||||
'skipCertificateHostnameCheck': True,
|
||||
'skipCertificateVerification': True
|
||||
# trustedCertificates is omitted
|
||||
}
|
||||
|
||||
return objects.VimConnectionInfo(
|
||||
vimId=vim['vim_id'],
|
||||
vimType='ETSINFV.OPENSTACK_KEYSTONE.V_3',
|
||||
interfaceInfo=interface_info,
|
||||
accessInfo=access_info
|
||||
)
|
||||
else: # k8s
|
||||
# TODO(oda-g): not supported at the moment
|
||||
pass
|
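An illustrative input/output pair for vim_to_conn_info() with an OpenStack VIM; all values are placeholders shaped like what vim_client returns:

vim = {
    'vim_id': '3a2c9a4f-0d64-43a1-9d92-7b9a2f6d0f0d',
    'vim_type': 'openstack',
    'vim_auth': {
        'username': 'nfv_user',
        'password': 'devstack',
        'project_name': 'nfv',
        'project_domain_name': 'Default',
        'user_domain_name': 'Default',
        'auth_url': 'http://127.0.0.1/identity/v3'},
    'placement_attr': {'regions': ['RegionOne']}}

conn_info = vim_to_conn_info(vim)
# conn_info.vimType == 'ETSINFV.OPENSTACK_KEYSTONE.V_3'
# conn_info.accessInfo['region'] == 'RegionOne'
# conn_info.interfaceInfo['endpoint'] == 'http://127.0.0.1/identity/v3'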
77
tacker/sol_refactored/common/vnf_instance_utils.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__) # not used at the moment
|
||||
|
||||
|
||||
def get_inst(context, inst_id):
|
||||
inst = objects.VnfInstanceV2.get_by_id(context, inst_id)
|
||||
if inst is None:
|
||||
raise sol_ex.VnfInstanceNotFound(inst_id=inst_id)
|
||||
return inst
|
||||
|
||||
|
||||
def get_inst_all(context):
|
||||
return objects.VnfInstanceV2.get_all(context)
|
||||
|
||||
|
||||
def inst_href(inst_id, endpoint):
|
||||
return "{}/v2/vnflcm/vnf_instances/{}".format(endpoint, inst_id)
|
||||
|
||||
|
||||
def make_inst_links(inst, endpoint):
|
||||
links = objects.VnfInstanceV2_Links()
|
||||
self_href = inst_href(inst.id, endpoint)
|
||||
links.self = objects.Link(href=self_href)
|
||||
if inst.instantiationState == 'NOT_INSTANTIATED':
|
||||
links.instantiate = objects.Link(href=self_href + "/instantiate")
|
||||
else: # 'INSTANTIATED'
|
||||
links.terminate = objects.Link(href=self_href + "/terminate")
|
||||
# TODO(oda-g): add when the operation is supported
|
||||
# links.scale = objects.Link(href = self_href + "/scale")
|
||||
# etc.
|
||||
|
||||
return links
|
||||
|
||||
|
||||
# see IETF RFC 7396
|
||||
def json_merge_patch(target, patch):
|
||||
if isinstance(patch, dict):
|
||||
if not isinstance(target, dict):
|
||||
target = {}
|
||||
for key, value in patch.items():
|
||||
if value is None:
|
||||
if key in target:
|
||||
del target[key]
|
||||
else:
|
||||
target[key] = json_merge_patch(target.get(key), value)
|
||||
return target
|
||||
else:
|
||||
return patch
|
||||
|
||||
|
||||
def select_vim_info(vim_connection_info):
|
||||
# NOTE: It is assumed that vimConnectionInfo has only one item
|
||||
# at the moment. If there are multiple items, it is uncertain
|
||||
# which item is selected.
|
||||
for vim_info in vim_connection_info.values():
|
||||
return vim_info
|
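json_merge_patch() follows RFC 7396; a small worked example with values chosen for illustration:

target = {'a': 'b', 'c': {'d': 'e', 'f': 'g'}}
patch = {'a': 'z', 'c': {'f': None}}
json_merge_patch(target, patch)
# -> {'a': 'z', 'c': {'d': 'e'}}   (None removes a key, dicts merge recursively)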
353
tacker/sol_refactored/common/vnfd_utils.py
Normal file
@@ -0,0 +1,353 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import io
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import yaml
|
||||
import zipfile
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Vnfd(object):
|
||||
|
||||
def __init__(self, vnfd_id):
|
||||
self.vnfd_id = vnfd_id
|
||||
self.tosca_meta = {}
|
||||
self.definitions = {}
|
||||
self.vnfd_flavours = {}
|
||||
self.csar_dir = None
|
||||
self.csar_dir_is_tmp = False
|
||||
|
||||
def init_from_csar_dir(self, csar_dir):
|
||||
self.csar_dir = csar_dir
|
||||
self.init_vnfd()
|
||||
|
||||
def init_from_zip_file(self, zip_file):
|
||||
# NOTE: This is used when external NFVO is used.
|
||||
# TODO(oda-g): There is no delete route at the moment.
|
||||
# A possible enhancement is to introduce cache management for
|
||||
# vnf packages extracted from an external NFVO.
|
||||
self.csar_dir = tempfile.mkdtemp()
|
||||
self.csar_dir_is_tmp = True
|
||||
|
||||
buff = io.BytesIO(zip_file)
|
||||
with zipfile.ZipFile(buff, 'r') as zf:
|
||||
zf.extractall(self.csar_dir)
|
||||
|
||||
self.init_vnfd()
|
||||
|
||||
def init_vnfd(self):
|
||||
# assume TOSCA-Metadata format
|
||||
path = os.path.join(self.csar_dir, 'TOSCA-Metadata', 'TOSCA.meta')
|
||||
if not os.path.isfile(path):
|
||||
raise sol_ex.InvalidVnfdFormat()
|
||||
|
||||
# expand from yaml to dict for TOSCA.meta and Definitions
|
||||
with open(path, 'r') as f:
|
||||
self.tosca_meta = yaml.safe_load(f.read())
|
||||
|
||||
path = os.path.join(self.csar_dir, 'Definitions')
|
||||
for entry in os.listdir(path):
|
||||
if entry.endswith(('.yaml', '.yml')):
|
||||
with open(os.path.join(path, entry), 'r') as f:
|
||||
content = yaml.safe_load(f.read())
|
||||
self.definitions[entry] = content
|
||||
|
||||
def delete(self):
|
||||
if self.csar_dir_is_tmp:
|
||||
shutil.rmtree(self.csar_dir)
|
||||
|
||||
def get_vnfd_flavour(self, flavour_id):
|
||||
if flavour_id in self.vnfd_flavours:
|
||||
return self.vnfd_flavours[flavour_id]
|
||||
|
||||
for data in self.definitions.values():
|
||||
fid = (data
|
||||
.get('topology_template', {})
|
||||
.get('substitution_mappings', {})
|
||||
.get('properties', {})
|
||||
.get('flavour_id'))
|
||||
if fid == flavour_id:
|
||||
self.vnfd_flavours[flavour_id] = data
|
||||
return data
|
||||
|
||||
# NOT found.
|
||||
# NOTE: checked by the caller. basically the check is necessary at
|
||||
# instantiate start only.
|
||||
|
||||
def get_sw_image(self, flavour_id):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
nodes = (vnfd
|
||||
.get('topology_template', {})
|
||||
.get('node_templates', {}))
|
||||
types = ['tosca.nodes.nfv.Vdu.Compute',
|
||||
'tosca.nodes.nfv.Vdu.VirtualBlockStorage']
|
||||
sw_image = {}
|
||||
for name, data in nodes.items():
|
||||
if (data['type'] in types and
|
||||
data.get('properties', {}).get('sw_image_data')):
|
||||
image = data['properties']['sw_image_data']['name']
|
||||
sw_image[name] = image
|
||||
|
||||
return sw_image
|
||||
|
||||
def get_sw_image_data(self, flavour_id):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
nodes = (vnfd
|
||||
.get('topology_template', {})
|
||||
.get('node_templates', {}))
|
||||
types = ['tosca.nodes.nfv.Vdu.Compute',
|
||||
'tosca.nodes.nfv.Vdu.VirtualBlockStorage']
|
||||
sw_image = {}
|
||||
for name, data in nodes.items():
|
||||
if (data['type'] in types and
|
||||
data.get('properties', {}).get('sw_image_data')):
|
||||
sw_image[name] = data['properties']['sw_image_data']
|
||||
sw_file = (data
|
||||
.get('artifacts', {})
|
||||
.get('sw_image', {})
|
||||
.get('file'))
|
||||
if sw_file:
|
||||
sw_image[name]['file'] = sw_file
|
||||
|
||||
return sw_image
|
||||
|
||||
def get_vnfd_properties(self):
|
||||
"""return properties used by instantiate"""
|
||||
# get from node_templates of VNF of
|
||||
# - ['properties']['configurable_properties']
|
||||
# - ['properties']['modifiable_attributes']['extensions']
|
||||
# - ['properties']['modifiable_attributes']['metadata']
|
||||
# NOTE: In etsi_nfv_sol001_vnfd_types.yaml which used by
|
||||
# tacker examples, definitions of these properties are commented out.
|
||||
|
||||
prop = {
|
||||
'vnfConfigurableProperties': {},
|
||||
'extensions': {},
|
||||
'metadata': {}
|
||||
}
|
||||
return prop
|
||||
|
||||
def get_nodes(self, flavour_id, node_type):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
nodes = (vnfd
|
||||
.get('topology_template', {})
|
||||
.get('node_templates', {}))
|
||||
|
||||
res = {name: data
|
||||
for name, data in nodes.items() if data['type'] == node_type}
|
||||
|
||||
return res
|
||||
|
||||
def get_vdu_nodes(self, flavour_id):
|
||||
return self.get_nodes(flavour_id, 'tosca.nodes.nfv.Vdu.Compute')
|
||||
|
||||
def get_storage_nodes(self, flavour_id):
|
||||
return self.get_nodes(flavour_id,
|
||||
'tosca.nodes.nfv.Vdu.VirtualBlockStorage')
|
||||
|
||||
def get_virtual_link_nodes(self, flavour_id):
|
||||
return self.get_nodes(flavour_id,
|
||||
'tosca.nodes.nfv.VnfVirtualLink')
|
||||
|
||||
def get_vducp_nodes(self, flavour_id):
|
||||
return self.get_nodes(flavour_id, 'tosca.nodes.nfv.VduCp')
|
||||
|
||||
def get_vdu_cps(self, flavour_id, vdu_name):
|
||||
cp_nodes = self.get_vducp_nodes(flavour_id)
|
||||
cps = []
|
||||
for cp_name, cp_data in cp_nodes.items():
|
||||
reqs = cp_data.get('requirements', [])
|
||||
for req in reqs:
|
||||
if req.get('virtual_binding') == vdu_name:
|
||||
cps.append(cp_name)
|
||||
break
|
||||
return cps
|
||||
|
||||
def get_base_hot(self, flavour_id):
|
||||
# NOTE: this method is openstack specific
|
||||
hot_dict = {}
|
||||
path = os.path.join(self.csar_dir, 'BaseHOT', flavour_id)
|
||||
if not os.path.isdir(path):
|
||||
return hot_dict
|
||||
|
||||
for entry in os.listdir(path):
|
||||
if entry.endswith(('.yaml', '.yml')):
|
||||
with open(os.path.join(path, entry), 'r') as f:
|
||||
content = yaml.safe_load(f.read())
|
||||
hot_dict['template'] = content
|
||||
break
|
||||
|
||||
nested = os.path.join(path, 'nested')
|
||||
if not os.path.isdir(nested):
|
||||
return hot_dict
|
||||
|
||||
for entry in os.listdir(nested):
|
||||
if entry.endswith(('.yaml', '.yml')):
|
||||
with open(os.path.join(nested, entry), 'r') as f:
|
||||
content = yaml.safe_load(f.read())
|
||||
hot_dict.setdefault('files', {})
|
||||
hot_dict['files'][entry] = content
|
||||
|
||||
return hot_dict
|
||||
|
||||
def get_vl_name_from_cp(self, flavour_id, cp_data):
|
||||
for req in cp_data.get('requirements', []):
|
||||
if 'virtual_link' in req:
|
||||
return req['virtual_link']
|
||||
|
||||
def get_compute_flavor(self, flavour_id, vdu_name):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
flavor = (vnfd.get('topology_template', {})
|
||||
.get('node_templates', {})
|
||||
.get(vdu_name, {})
|
||||
.get('capabilities', {})
|
||||
.get('virtual_compute', {})
|
||||
.get('properties', {})
|
||||
.get('requested_additional_capabilities', {})
|
||||
.get('properties', {})
|
||||
.get('requested_additional_capability_name'))
|
||||
if flavor:
|
||||
return flavor
|
||||
|
||||
def make_tmp_csar_dir(self):
|
||||
# If this fails, a 500 error which is not caused by a programming error
|
||||
# but is a true 'Internal server error' is raised.
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
shutil.copytree(self.csar_dir, tmp_dir,
|
||||
ignore=shutil.ignore_patterns('Files'),
|
||||
dirs_exist_ok=True)
|
||||
return tmp_dir
|
||||
|
||||
def remove_tmp_csar_dir(self, tmp_dir):
|
||||
try:
|
||||
shutil.rmtree(tmp_dir)
|
||||
except Exception:
|
||||
LOG.exception("rmtree %s failed", tmp_dir)
|
||||
# as this error does not disturb the process, continue.
|
||||
|
||||
def get_policy_values_by_type(self, flavour_id, policy_type):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
policies = (vnfd.get('topology_template', {})
|
||||
.get('policies', []))
|
||||
if isinstance(policies, dict):
|
||||
policies = [policies]
|
||||
|
||||
ret = [value
|
||||
for policy in policies for value in policy.values()
|
||||
if value['type'] == policy_type]
|
||||
|
||||
return ret
|
||||
|
||||
def get_default_instantiation_level(self, flavour_id):
|
||||
policies = self.get_policy_values_by_type(flavour_id,
|
||||
'tosca.policies.nfv.InstantiationLevels')
|
||||
if policies:
|
||||
return policies[0].get('properties', {}).get('default_level')
|
||||
|
||||
def get_vdu_num(self, flavour_id, vdu_name, instantiation_level):
|
||||
policies = self.get_policy_values_by_type(flavour_id,
|
||||
'tosca.policies.nfv.VduInstantiationLevels')
|
||||
for policy in policies:
|
||||
if vdu_name in policy.get('targets', []):
|
||||
return (policy.get('properties', {})
|
||||
.get('levels', {})
|
||||
.get(instantiation_level, {})
|
||||
.get('number_of_instances'))
|
||||
return 0
|
||||
|
||||
def get_placement_groups(self, flavour_id):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
groups = (vnfd.get('topology_template', {})
|
||||
.get('groups', []))
|
||||
if isinstance(groups, dict):
|
||||
groups = [groups]
|
||||
|
||||
ret = {key: value['members']
|
||||
for group in groups for key, value in group.items()
|
||||
if value['type'] == 'tosca.groups.nfv.PlacementGroup'}
|
||||
|
||||
return ret
|
||||
|
||||
def _get_targets(self, flavour_id, affinity_type):
|
||||
policies = self.get_policy_values_by_type(flavour_id, affinity_type)
|
||||
groups = self.get_placement_groups(flavour_id)
|
||||
|
||||
ret = []
|
||||
for policy in policies:
|
||||
scope = policy['properties']['scope']
|
||||
if scope not in ['zone', 'nfvi_node']:
|
||||
continue
|
||||
|
||||
targets = []
|
||||
for target in policy['targets']:
|
||||
if target in list(groups.keys()):
|
||||
targets += groups[target]
|
||||
else:
|
||||
targets.append(target)
|
||||
|
||||
ret.append((targets, scope))
|
||||
|
||||
return ret
|
||||
|
||||
def get_affinity_targets(self, flavour_id):
|
||||
return self._get_targets(flavour_id,
|
||||
'tosca.policies.nfv.AffinityRule')
|
||||
|
||||
def get_anti_affinity_targets(self, flavour_id):
|
||||
return self._get_targets(flavour_id,
|
||||
'tosca.policies.nfv.AntiAffinityRule')
|
||||
|
||||
def get_interface_script(self, flavour_id, operation):
|
||||
vnfd = self.get_vnfd_flavour(flavour_id)
|
||||
nodes = (vnfd.get('topology_template', {})
|
||||
.get('node_templates', {}))
|
||||
for node in nodes.values():
|
||||
if 'interfaces' not in node:
|
||||
continue
|
||||
op_value = (node['interfaces'].get('Vnflcm', {})
|
||||
.get(operation))
|
||||
if not isinstance(op_value, dict):
|
||||
# op_value may be []
|
||||
return
|
||||
|
||||
artifact = op_value.get('implementation')
|
||||
if artifact is None:
|
||||
# no script specified for the operation
|
||||
return
|
||||
|
||||
script = (node.get('artifacts', {})
|
||||
.get(artifact, {})
|
||||
.get('file'))
|
||||
if script is None:
|
||||
# cannot happen if the vnf package is correct.
|
||||
return
|
||||
|
||||
script_type = node['artifacts'][artifact].get('type')
|
||||
if script_type != 'tosca.artifacts.Implementation.Python':
|
||||
# support python script only at the moment
|
||||
msg = "Unsupported script type {}".format(script_type)
|
||||
raise sol_ex.SolHttpError422(sol_detail=msg)
|
||||
|
||||
return script
|
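A usage sketch of the Vnfd helper; the vnfd id, package path and VDU name are placeholders (the 'simple' flavour matches the sample packages in this patch):

from tacker.sol_refactored.common import vnfd_utils

vnfd = vnfd_utils.Vnfd('b1bb0ce7-ebca-4fa7-95ed-4840d7000000')
vnfd.init_from_csar_dir('/var/lib/tacker/vnfpackages/sample1/csar')

vdus = vnfd.get_vdu_nodes('simple')                # e.g. {'VDU1': {...}}
flavor = vnfd.get_compute_flavor('simple', 'VDU1')
num = vnfd.get_vdu_num('simple', 'VDU1', 'instantiation_level_1')
hot = vnfd.get_base_hot('simple')                  # {'template': ..., 'files': {...}}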
0
tacker/sol_refactored/conductor/__init__.py
Normal file
40
tacker/sol_refactored/conductor/conductor_rpc_v2.py
Normal file
@@ -0,0 +1,40 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import oslo_messaging
|
||||
|
||||
from tacker.common import rpc
|
||||
from tacker.sol_refactored.objects import base as objects_base
|
||||
|
||||
|
||||
TOPIC_CONDUCTOR_V2 = 'TACKER_CONDUCTOR_V2'
|
||||
|
||||
|
||||
class VnfLcmRpcApiV2(object):
|
||||
|
||||
target = oslo_messaging.Target(
|
||||
exchange='tacker',
|
||||
topic=TOPIC_CONDUCTOR_V2,
|
||||
fanout=False,
|
||||
version='1.0')
|
||||
|
||||
def start_lcm_op(self, context, lcmocc_id):
|
||||
serializer = objects_base.TackerObjectSerializer()
|
||||
|
||||
client = rpc.get_client(self.target, version_cap=None,
|
||||
serializer=serializer)
|
||||
cctxt = client.prepare()
|
||||
cctxt.cast(context, 'start_lcm_op', lcmocc_id=lcmocc_id)
|
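Caller-side sketch: the API side (tacker-server) is expected to cast to the conductor roughly like this; variable names are illustrative:

from tacker.sol_refactored.conductor import conductor_rpc_v2

rpc = conductor_rpc_v2.VnfLcmRpcApiV2()
rpc.start_lcm_op(context, lcmocc.id)   # asynchronous cast; returns immediately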
131
tacker/sol_refactored/conductor/conductor_v2.py
Normal file
@@ -0,0 +1,131 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.common import log
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import coordinate
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.conductor import vnflcm_driver_v2
|
||||
from tacker.sol_refactored.nfvo import nfvo_client
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.sol_refactored.objects.v2 import fields
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
class ConductorV2(object):
|
||||
|
||||
def __init__(self):
|
||||
self.vnflcm_driver = vnflcm_driver_v2.VnfLcmDriverV2()
|
||||
self.endpoint = CONF.v2_vnfm.endpoint
|
||||
self.nfvo_client = nfvo_client.NfvoClient()
|
||||
|
||||
def _get_lcm_op_method(self, op, postfix):
|
||||
method = getattr(self.vnflcm_driver, "%s_%s" % (op.lower(), postfix))
|
||||
return method
|
||||
|
||||
def _set_lcmocc_error(self, lcmocc, ex):
|
||||
if isinstance(ex, sol_ex.SolException):
|
||||
problem_details = ex.make_problem_details()
|
||||
else:
|
||||
# programming bug. it occurs only under development.
|
||||
problem_details = {'status': 500,
|
||||
'detail': str(ex)}
|
||||
lcmocc.error = objects.ProblemDetails.from_dict(problem_details)
|
||||
|
||||
@log.log
|
||||
def start_lcm_op(self, context, lcmocc_id):
|
||||
lcmocc = lcmocc_utils.get_lcmocc(context, lcmocc_id)
|
||||
|
||||
self._start_lcm_op(context, lcmocc)
|
||||
|
||||
@coordinate.lock_vnf_instance('{lcmocc.vnfInstanceId}', delay=True)
|
||||
def _start_lcm_op(self, context, lcmocc):
|
||||
# just consistency check
|
||||
if lcmocc.operationState != fields.LcmOperationStateType.STARTING:
|
||||
LOG.error("VnfLcmOpOcc unexpected operationState.")
|
||||
return
|
||||
|
||||
inst = inst_utils.get_inst(context, lcmocc.vnfInstanceId)
|
||||
|
||||
# NOTE: basically no error can happen up to this point.
|
||||
# if an error occurred, lcmocc.operationState remains STARTING.
|
||||
# see the log of the tacker-conductor to investigate the cause
|
||||
# of error.
|
||||
|
||||
# NOTE: the following flow follows SOL003 5.4.1.2
|
||||
|
||||
# send notification STARTING
|
||||
self.nfvo_client.send_lcmocc_notification(context, lcmocc, inst,
|
||||
self.endpoint)
|
||||
|
||||
try:
|
||||
vnfd = self.nfvo_client.get_vnfd(context, inst.vnfdId,
|
||||
all_contents=True)
|
||||
|
||||
# NOTE: perform grant exchange mainly but also perform
|
||||
# things to do at the STARTING phase, e.g. request check.
|
||||
grant_method = self._get_lcm_op_method(lcmocc.operation, 'grant')
|
||||
grant_req, grant = grant_method(context, lcmocc, inst, vnfd)
|
||||
|
||||
lcmocc.operationState = fields.LcmOperationStateType.PROCESSING
|
||||
lcmocc.update(context)
|
||||
except Exception as ex:
|
||||
LOG.exception("STARTING %s failed", lcmocc.operation)
|
||||
lcmocc.operationState = fields.LcmOperationStateType.ROLLED_BACK
|
||||
self._set_lcmocc_error(lcmocc, ex)
|
||||
lcmocc.update(context)
|
||||
|
||||
# send notification PROCESSING or ROLLED_BACK
|
||||
self.nfvo_client.send_lcmocc_notification(context, lcmocc, inst,
|
||||
self.endpoint)
|
||||
|
||||
if lcmocc.operationState != fields.LcmOperationStateType.PROCESSING:
|
||||
return
|
||||
|
||||
try:
|
||||
# perform preamble LCM script
|
||||
start_method = self._get_lcm_op_method(lcmocc.operation, 'start')
|
||||
start_method(context, lcmocc, inst, grant_req, grant, vnfd)
|
||||
|
||||
process_method = self._get_lcm_op_method(lcmocc.operation,
|
||||
'process')
|
||||
process_method(context, lcmocc, inst, grant_req, grant, vnfd)
|
||||
|
||||
# perform postamble LCM script
|
||||
end_method = self._get_lcm_op_method(lcmocc.operation, 'end')
|
||||
end_method(context, lcmocc, inst, grant_req, grant, vnfd)
|
||||
|
||||
lcmocc.operationState = fields.LcmOperationStateType.COMPLETED
|
||||
# update inst and lcmocc at the same time
|
||||
with context.session.begin(subtransactions=True):
|
||||
inst.update(context)
|
||||
lcmocc.update(context)
|
||||
except Exception as ex:
|
||||
LOG.exception("PROCESSING %s failed", lcmocc.operation)
|
||||
lcmocc.operationState = fields.LcmOperationStateType.FAILED_TEMP
|
||||
self._set_lcmocc_error(lcmocc, ex)
|
||||
lcmocc.update(context)
|
||||
|
||||
# send notification COMPLETED or FAILED_TEMP
|
||||
self.nfvo_client.send_lcmocc_notification(context, lcmocc, inst,
|
||||
self.endpoint)
|
29
tacker/sol_refactored/conductor/v2_hook.py
Normal file
@ -0,0 +1,29 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from tacker.sol_refactored.conductor import conductor_rpc_v2
|
||||
from tacker.sol_refactored.conductor import conductor_v2
|
||||
from tacker.sol_refactored.objects import base as objects_base
|
||||
|
||||
|
||||
class ConductorV2Hook(object):
|
||||
|
||||
def initialize_service_hook(self, service):
|
||||
endpoints = [conductor_v2.ConductorV2()]
|
||||
serializer = objects_base.TackerObjectSerializer()
|
||||
service.conn.create_consumer(
|
||||
conductor_rpc_v2.TOPIC_CONDUCTOR_V2, endpoints,
|
||||
serializer=serializer)
|
379
tacker/sol_refactored/conductor/vnflcm_driver_v2.py
Normal file
@ -0,0 +1,379 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import pickle
|
||||
import subprocess
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import vim_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.infra_drivers.openstack import openstack
|
||||
from tacker.sol_refactored.nfvo import nfvo_client
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
class VnfLcmDriverV2(object):
|
||||
|
||||
def __init__(self):
|
||||
self.endpoint = CONF.v2_vnfm.endpoint
|
||||
self.nfvo_client = nfvo_client.NfvoClient()
|
||||
|
||||
def _get_link_ports(self, inst_req):
|
||||
names = []
|
||||
if inst_req.obj_attr_is_set('extVirtualLinks'):
|
||||
for ext_vl in inst_req.extVirtualLinks:
|
||||
for ext_cp in ext_vl.extCps:
|
||||
for cp_config in ext_cp.cpConfig.values():
|
||||
if cp_config.obj_attr_is_set('linkPortId'):
|
||||
names.append(ext_cp.cpdId)
|
||||
|
||||
if inst_req.obj_attr_is_set('extManagedVirtualLinks'):
|
||||
for ext_mgd_vl in inst_req.extManagedVirtualLinks:
|
||||
if ext_mgd_vl.obj_attr_is_set('vnfLinkPort'):
|
||||
names.append(ext_mgd_vl.vnfVirtualLinkDescId)
|
||||
|
||||
return names
|
||||
|
||||
def instantiate_grant(self, context, lcmocc, inst, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
flavour_id = req.flavourId
|
||||
|
||||
if vnfd.get_vnfd_flavour(flavour_id) is None:
|
||||
raise sol_ex.FlavourIdNotFound(flavour_id=flavour_id)
|
||||
|
||||
# grant exchange
|
||||
# NOTE: the api_version of NFVO supposes 1.4.0 at the moment.
|
||||
grant_req = objects.GrantRequestV1(
|
||||
vnfInstanceId=inst.id,
|
||||
vnfLcmOpOccId=lcmocc.id,
|
||||
vnfdId=inst.vnfdId,
|
||||
flavourId=flavour_id,
|
||||
operation=lcmocc.operation,
|
||||
isAutomaticInvocation=lcmocc.isAutomaticInvocation
|
||||
)
|
||||
|
||||
if req.obj_attr_is_set('instantiationLevelId'):
|
||||
inst_level = req.instantiationLevelId
|
||||
grant_req.instantiationLevelId = inst_level
|
||||
else:
|
||||
inst_level = vnfd.get_default_instantiation_level(flavour_id)
|
||||
|
||||
add_reses = []
|
||||
nodes = vnfd.get_vdu_nodes(flavour_id)
|
||||
link_port_names = self._get_link_ports(req)
|
||||
for name in nodes.keys():
|
||||
num = vnfd.get_vdu_num(flavour_id, name, inst_level)
|
||||
vdu_cp_names = vnfd.get_vdu_cps(flavour_id, name)
|
||||
for _ in range(num):
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='COMPUTE',
|
||||
resourceTemplateId=name)
|
||||
add_reses.append(res_def)
|
||||
|
||||
for cp_name in vdu_cp_names:
|
||||
if cp_name in link_port_names:
|
||||
continue
|
||||
for _ in range(num):
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='LINKPORT',
|
||||
resourceTemplateId=cp_name)
|
||||
add_reses.append(res_def)
|
||||
|
||||
nodes = vnfd.get_storage_nodes(flavour_id)
|
||||
for name in nodes.keys():
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='STORAGE',
|
||||
resourceTemplateId=name)
|
||||
add_reses.append(res_def)
|
||||
|
||||
nodes = vnfd.get_virtual_link_nodes(flavour_id)
|
||||
for name in nodes.keys():
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='VL',
|
||||
resourceTemplateId=name)
|
||||
add_reses.append(res_def)
|
||||
|
||||
if add_reses:
|
||||
grant_req.addResources = add_reses
|
||||
|
||||
# placementConstraints
|
||||
affinity_policies = {
|
||||
'AFFINITY': vnfd.get_affinity_targets(flavour_id),
|
||||
'ANTI_AFFINITY': vnfd.get_anti_affinity_targets(flavour_id)
|
||||
}
|
||||
plc_consts = []
|
||||
for key, value in affinity_policies.items():
|
||||
for targets, scope in value:
|
||||
res_refs = []
|
||||
for target in targets:
|
||||
for res in add_reses:
|
||||
if res.resourceTemplateId == target:
|
||||
res_ref = objects.ConstraintResourceRefV1(
|
||||
idType='GRANT',
|
||||
resourceId=res.id)
|
||||
res_refs.append(res_ref)
|
||||
|
||||
plc_const = objects.PlacementConstraintV1(
|
||||
affinityOrAntiAffinity=key,
|
||||
scope=scope.upper(),
|
||||
resource=res_refs)
|
||||
plc_consts.append(plc_const)
|
||||
|
||||
if plc_consts:
|
||||
grant_req.placementConstraints = plc_consts
|
||||
|
||||
if req.obj_attr_is_set('additionalParams'):
|
||||
grant_req.additionalParams = req.additionalParams
|
||||
|
||||
grant_req._links = objects.GrantRequestV1_Links(
|
||||
vnfLcmOpOcc=objects.Link(
|
||||
href=lcmocc_utils.lcmocc_href(lcmocc.id, self.endpoint)),
|
||||
vnfInstance=objects.Link(
|
||||
href=inst_utils.inst_href(inst.id, self.endpoint)))
|
||||
|
||||
# NOTE: if not granted, 403 error raised.
|
||||
grant_res = self.nfvo_client.grant(context, grant_req)
|
||||
|
||||
# set inst vimConnectionInfo
|
||||
vim_infos = {}
|
||||
if req.obj_attr_is_set('vimConnectionInfo'):
|
||||
vim_infos = req.vimConnectionInfo
|
||||
|
||||
if grant_res.obj_attr_is_set('vimConnectionInfo'):
|
||||
# if NFVO returns vimConnectionInfo use it.
|
||||
# As the controller does for req.vimConnectionInfo, if accessInfo
|
||||
# or interfaceInfo is not specified, get them from VIM DB.
|
||||
# vimId must be in VIM DB.
|
||||
res_vim_infos = grant_res.vimConnectionInfo
|
||||
for key, res_vim_info in res_vim_infos.items():
|
||||
if not (res_vim_info.obj_attr_is_set('accessInfo') and
|
||||
res_vim_info.obj_attr_is_set('interfaceInfo')):
|
||||
vim_info = vim_utils.get_vim(context, res_vim_info.vimId)
|
||||
res_vim_infos[key] = vim_info
|
||||
|
||||
vim_infos = inst_utils.json_merge_patch(vim_infos, res_vim_infos)
|
||||
|
||||
if not vim_infos:
|
||||
# Use the project's default VIM. This is Tacker-specific behavior.
|
||||
vim_info = vim_utils.get_default_vim(context)
|
||||
if vim_info:
|
||||
vim_infos["default"] = vim_info
|
||||
else:
|
||||
# must be one vimConnectionInfo at least.
|
||||
raise sol_ex.NoVimConnectionInfo()
|
||||
|
||||
inst.vimConnectionInfo = vim_infos
|
||||
|
||||
return grant_req, grant_res
|
||||
|
||||
def instantiate_process(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
if vim_info.vimType == 'ETSINFV.OPENSTACK_KEYSTONE.V_3':
|
||||
driver = openstack.Openstack()
|
||||
driver.instantiate(req, inst, grant_req, grant, vnfd)
|
||||
else:
|
||||
# Only OpenStack is supported at the moment.
raise sol_ex.SolException(sol_detail='unsupported VIM type')
|
||||
|
||||
inst.instantiationState = 'INSTANTIATED'
|
||||
lcmocc_utils.make_instantiate_lcmocc(lcmocc, inst)
|
||||
|
||||
def _exec_mgmt_driver_script(self, operation, flavour_id, req, inst,
|
||||
grant_req, grant, vnfd):
|
||||
script = vnfd.get_interface_script(flavour_id, operation)
|
||||
if script is None:
|
||||
return
|
||||
|
||||
tmp_csar_dir = vnfd.make_tmp_csar_dir()
|
||||
script_dict = {
|
||||
'operation': operation,
|
||||
'request': req.to_dict(),
|
||||
'vnf_instance': inst.to_dict(),
|
||||
'grant_request': grant_req.to_dict(),
|
||||
'grant_response': grant.to_dict(),
|
||||
'tmp_csar_dir': tmp_csar_dir
|
||||
}
|
||||
# script is relative path to Definitions/xxx.yaml
|
||||
script_path = os.path.join(tmp_csar_dir, "Definitions", script)
|
||||
|
||||
out = subprocess.run(["python3", script_path],
|
||||
input=pickle.dumps(script_dict),
|
||||
capture_output=True)
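# NOTE (illustrative): the management driver script receives script_dict
# pickled on its stdin. A minimal script could therefore look roughly like
# the following (names here are only an example, not a fixed API):
#
#   import pickle
#   import sys
#
#   params = pickle.load(sys.stdin.buffer)
#   if params['operation'] == 'instantiate_end':
#       ...  # configure the deployed VNF using params['vnf_instance']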
|
||||
|
||||
vnfd.remove_tmp_csar_dir(tmp_csar_dir)
|
||||
|
||||
if out.returncode != 0:
|
||||
LOG.debug("execute %s failed: %s", operation, out.stderr)
|
||||
msg = "{} failed: {}".format(operation, out.stderr)
|
||||
raise sol_ex.MgmtDriverExecutionFailed(sol_detail=msg)
|
||||
|
||||
LOG.debug("execute %s of %s success.", operation, script)
|
||||
|
||||
def instantiate_start(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
self._exec_mgmt_driver_script('instantiate_start',
|
||||
req.flavourId, req, inst, grant_req, grant, vnfd)
|
||||
|
||||
def instantiate_end(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
self._exec_mgmt_driver_script('instantiate_end',
|
||||
req.flavourId, req, inst, grant_req, grant, vnfd)
|
||||
|
||||
def terminate_grant(self, context, lcmocc, inst, vnfd):
|
||||
# grant exchange
|
||||
# NOTE: the api_version of NFVO supposes 1.4.0 at the moment.
|
||||
grant_req = objects.GrantRequestV1(
|
||||
vnfInstanceId=inst.id,
|
||||
vnfLcmOpOccId=lcmocc.id,
|
||||
vnfdId=inst.vnfdId,
|
||||
operation=lcmocc.operation,
|
||||
isAutomaticInvocation=lcmocc.isAutomaticInvocation
|
||||
)
|
||||
|
||||
inst_info = inst.instantiatedVnfInfo
|
||||
rm_reses = []
|
||||
vnfc_cps = {}
|
||||
if inst_info.obj_attr_is_set('vnfcResourceInfo'):
|
||||
for inst_vnc in inst_info.vnfcResourceInfo:
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='COMPUTE',
|
||||
resourceTemplateId=inst_vnc.vduId,
|
||||
resource=inst_vnc.computeResource)
|
||||
rm_reses.append(res_def)
|
||||
|
||||
if inst_vnc.obj_attr_is_set('vnfcCpInfo'):
|
||||
for cp_info in inst_vnc.vnfcCpInfo:
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='LINKPORT')
|
||||
rm_reses.append(res_def)
|
||||
vnfc_cps[cp_info.id] = res_def
|
||||
|
||||
if inst_info.obj_attr_is_set('vnfVirtualLinkResourceInfo'):
|
||||
for inst_vl in inst_info.vnfVirtualLinkResourceInfo:
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='VL',
|
||||
resourceTemplateId=inst_vl.vnfVirtualLinkDescId,
|
||||
resource=inst_vl.networkResource)
|
||||
rm_reses.append(res_def)
|
||||
|
||||
if inst_vl.obj_attr_is_set('vnfLinkPorts'):
|
||||
for port in inst_vl.vnfLinkPorts:
|
||||
if port.cpInstanceId in vnfc_cps:
|
||||
res_def = vnfc_cps[port.cpInstanceId]
|
||||
res_def.resource = port.resourceHandle
|
||||
|
||||
if inst_info.obj_attr_is_set('virtualStorageResourceInfo'):
|
||||
for inst_str in inst_info.virtualStorageResourceInfo:
|
||||
res_def = objects.ResourceDefinitionV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
type='STORAGE',
|
||||
resourceTemplateId=inst_str.virtualStorageDescId,
|
||||
resource=inst_str.storageResource)
|
||||
rm_reses.append(res_def)
|
||||
|
||||
if inst_info.obj_attr_is_set('extVirtualLinkInfo'):
|
||||
for ext_vl in inst_info.extVirtualLinkInfo:
|
||||
if ext_vl.obj_attr_is_set('extLinkPorts'):
|
||||
for port in ext_vl.extLinkPorts:
|
||||
if (port.obj_attr_is_set('cpInstanceId') and
|
||||
port.cpInstanceId in vnfc_cps):
|
||||
res_def = vnfc_cps[port.cpInstanceId]
|
||||
res_def.resource = port.resourceHandle
|
||||
|
||||
if inst_info.obj_attr_is_set('extManagedVirtualLinkInfo'):
|
||||
for ext_mgd_vl in inst_info.extManagedVirtualLinkInfo:
|
||||
if ext_mgd_vl.obj_attr_is_set('vnfLinkPorts'):
|
||||
for port in ext_mgd_vl.vnfLinkPorts:
|
||||
if (port.obj_attr_is_set('cpInstanceId') and
|
||||
port.cpInstanceId in vnfc_cps):
|
||||
res_def = vnfc_cps[port.cpInstanceId]
|
||||
res_def.resource = port.resourceHandle
|
||||
|
||||
if rm_reses:
|
||||
grant_req.removeResources = rm_reses
|
||||
|
||||
grant_req._links = objects.GrantRequestV1_Links(
|
||||
vnfLcmOpOcc=objects.Link(
|
||||
href=lcmocc_utils.lcmocc_href(lcmocc.id, self.endpoint)),
|
||||
vnfInstance=objects.Link(
|
||||
href=inst_utils.inst_href(inst.id, self.endpoint)))
|
||||
|
||||
# NOTE: if not granted, 403 error raised.
|
||||
grant_res = self.nfvo_client.grant(context, grant_req)
|
||||
|
||||
return grant_req, grant_res
|
||||
|
||||
def terminate_process(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
if vim_info.vimType == 'ETSINFV.OPENSTACK_KEYSTONE.V_3':
|
||||
driver = openstack.Openstack()
|
||||
driver.terminate(req, inst, grant_req, grant, vnfd)
|
||||
else:
|
||||
# Only OpenStack is supported at the moment.
raise sol_ex.SolException(sol_detail='unsupported VIM type')
|
||||
|
||||
inst.instantiationState = 'NOT_INSTANTIATED'
|
||||
lcmocc_utils.make_terminate_lcmocc(lcmocc, inst)
|
||||
|
||||
# reset instantiatedVnfInfo
|
||||
# NOTE: reset after update lcmocc
|
||||
inst_vnf_info = objects.VnfInstanceV2_InstantiatedVnfInfo(
|
||||
flavourId=inst.instantiatedVnfInfo.flavourId,
|
||||
vnfState='STOPPED',
|
||||
# NOTE: extCpInfo is omitted. its cardinality is 1..N but it is
|
||||
# meaningless to have it for terminated vnf instance.
|
||||
)
|
||||
inst.instantiatedVnfInfo = inst_vnf_info
|
||||
|
||||
# reset vimConnectionInfo
|
||||
inst.vimConnectionInfo = {}
|
||||
|
||||
def terminate_start(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
flavour_id = inst.instantiatedVnfInfo.flavourId
|
||||
self._exec_mgmt_driver_script('terminate_start',
|
||||
flavour_id, req, inst, grant_req, grant, vnfd)
|
||||
|
||||
def terminate_end(self, context, lcmocc, inst, grant_req,
|
||||
grant, vnfd):
|
||||
req = lcmocc.operationParams
|
||||
flavour_id = inst.instantiatedVnfInfo.flavourId
|
||||
self._exec_mgmt_driver_script('terminate_end',
|
||||
flavour_id, req, inst, grant_req, grant, vnfd)
|
0
tacker/sol_refactored/controller/__init__.py
Normal file
338
tacker/sol_refactored/controller/vnflcm_v2.py
Normal file
@ -0,0 +1,338 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.sol_refactored.api.api_version import supported_versions_v2
|
||||
from tacker.sol_refactored.api.schemas import vnflcm_v2 as schema
|
||||
from tacker.sol_refactored.api import validator
|
||||
from tacker.sol_refactored.api import wsgi as sol_wsgi
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import coordinate
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import subscription_utils as subsc_utils
|
||||
from tacker.sol_refactored.common import vim_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.conductor import conductor_rpc_v2
|
||||
from tacker.sol_refactored.controller import vnflcm_view
|
||||
from tacker.sol_refactored.nfvo import nfvo_client
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.sol_refactored.objects.v2 import fields as v2fields
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__) # NOTE: unused at the moment
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
class VnfLcmControllerV2(sol_wsgi.SolAPIController):
|
||||
|
||||
def __init__(self):
|
||||
self.nfvo_client = nfvo_client.NfvoClient()
|
||||
self.endpoint = CONF.v2_vnfm.endpoint
|
||||
self.conductor_rpc = conductor_rpc_v2.VnfLcmRpcApiV2()
|
||||
self._inst_view = vnflcm_view.InstanceViewBuilder(self.endpoint)
|
||||
self._lcmocc_view = vnflcm_view.LcmOpOccViewBuilder(self.endpoint)
|
||||
self._subsc_view = vnflcm_view.SubscriptionViewBuilder(self.endpoint)
|
||||
|
||||
def api_versions(self, request):
|
||||
return sol_wsgi.SolResponse(200, supported_versions_v2)
|
||||
|
||||
@validator.schema(schema.CreateVnfRequest_V200, '2.0.0')
|
||||
def create(self, request, body):
|
||||
context = request.context
|
||||
vnfd_id = body['vnfdId']
|
||||
|
||||
pkg_info = self.nfvo_client.get_vnf_package_info_vnfd(
|
||||
context, vnfd_id)
|
||||
if pkg_info.operationalState != "ENABLED":
|
||||
raise sol_ex.VnfdIdNotEnabled(vnfd_id=vnfd_id)
|
||||
|
||||
vnfd = self.nfvo_client.get_vnfd(context, vnfd_id)
|
||||
vnfd_prop = vnfd.get_vnfd_properties()
|
||||
|
||||
metadata = vnfd_prop['metadata']
|
||||
if 'metadata' in body:
|
||||
metadata = inst_utils.json_merge_patch(metadata, body['metadata'])
|
||||
|
||||
inst = objects.VnfInstanceV2(
|
||||
# required fields
|
||||
id=uuidutils.generate_uuid(),
|
||||
vnfdId=vnfd_id,
|
||||
vnfProvider=pkg_info.vnfProvider,
|
||||
vnfProductName=pkg_info.vnfProductName,
|
||||
vnfSoftwareVersion=pkg_info.vnfSoftwareVersion,
|
||||
vnfdVersion=pkg_info.vnfdVersion,
|
||||
instantiationState='NOT_INSTANTIATED',
|
||||
# optional fields
|
||||
# NOTE: These may be absent, but fill them with empty values
# to make later handling easier.
|
||||
vnfInstanceName=body.get('vnfInstanceName', ""),
|
||||
vnfInstanceDescription=body.get('vnfInstanceDescription', ""),
|
||||
vnfConfigurableProperties=vnfd_prop['vnfConfigurableProperties'],
|
||||
metadata=metadata,
|
||||
extensions=vnfd_prop['extensions']
|
||||
# Not set at the moment; these will be set at instantiation.
|
||||
# vimConnectionInfo
|
||||
# instantiatedVnfInfo
|
||||
)
|
||||
|
||||
inst.create(context)
|
||||
|
||||
self.nfvo_client.send_inst_create_notification(context, inst,
|
||||
self.endpoint)
|
||||
resp_body = self._inst_view.detail(inst)
|
||||
location = inst_utils.inst_href(inst.id, self.endpoint)
|
||||
|
||||
return sol_wsgi.SolResponse(201, resp_body, location=location)
|
||||
|
||||
def index(self, request):
|
||||
filter_param = request.GET.get('filter')
|
||||
if filter_param is not None:
|
||||
filters = self._inst_view.parse_filter(filter_param)
|
||||
else:
|
||||
filters = None
|
||||
# validate_filter
|
||||
|
||||
selector = self._inst_view.parse_selector(request.GET)
|
||||
|
||||
insts = inst_utils.get_inst_all(request.context)
|
||||
|
||||
resp_body = self._inst_view.detail_list(insts, filters, selector)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
def show(self, request, id):
|
||||
inst = inst_utils.get_inst(request.context, id)
|
||||
|
||||
resp_body = self._inst_view.detail(inst)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
@coordinate.lock_vnf_instance('{id}')
|
||||
def delete(self, request, id):
|
||||
context = request.context
|
||||
inst = inst_utils.get_inst(request.context, id)
|
||||
|
||||
if inst.instantiationState != 'NOT_INSTANTIATED':
|
||||
raise sol_ex.VnfInstanceIsInstantiated(inst_id=id)
|
||||
|
||||
inst.delete(context)
|
||||
|
||||
# NOTE: The inst record has been deleted from the DB, but the inst
# object can still be accessed here.
|
||||
self.nfvo_client.send_inst_delete_notification(context, inst,
|
||||
self.endpoint)
|
||||
return sol_wsgi.SolResponse(204, None)
|
||||
|
||||
@validator.schema(schema.InstantiateVnfRequest_V200, '2.0.0')
|
||||
@coordinate.lock_vnf_instance('{id}')
|
||||
def instantiate(self, request, id, body):
|
||||
context = request.context
|
||||
inst = inst_utils.get_inst(context, id)
|
||||
|
||||
if inst.instantiationState != 'NOT_INSTANTIATED':
|
||||
raise sol_ex.VnfInstanceIsInstantiated(inst_id=id)
|
||||
|
||||
now = datetime.utcnow()
|
||||
lcmocc = objects.VnfLcmOpOccV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
operationState=v2fields.LcmOperationStateType.STARTING,
|
||||
stateEnteredTime=now,
|
||||
startTime=now,
|
||||
vnfInstanceId=id,
|
||||
operation=v2fields.LcmOperationType.INSTANTIATE,
|
||||
isAutomaticInvocation=False,
|
||||
isCancelPending=False,
|
||||
operationParams=body)
|
||||
|
||||
req_param = lcmocc.operationParams
|
||||
# If partial vimConnectionInfo is given, check it and fill in the missing parts here.
|
||||
if req_param.obj_attr_is_set('vimConnectionInfo'):
|
||||
# if accessInfo or interfaceInfo is not specified, get from
|
||||
# VIM DB. vimId must be in VIM DB.
|
||||
req_vim_infos = req_param.vimConnectionInfo
|
||||
for key, req_vim_info in req_vim_infos.items():
|
||||
if not (req_vim_info.obj_attr_is_set('accessInfo') and
|
||||
req_vim_info.obj_attr_is_set('interfaceInfo')):
|
||||
vim_info = vim_utils.get_vim(context, req_vim_info.vimId)
|
||||
req_vim_infos[key] = vim_info
|
||||
|
||||
lcmocc.create(context)
|
||||
|
||||
self.conductor_rpc.start_lcm_op(context, lcmocc.id)
|
||||
|
||||
location = lcmocc_utils.lcmocc_href(lcmocc.id, self.endpoint)
|
||||
|
||||
return sol_wsgi.SolResponse(202, None, location=location)
|
||||
|
||||
@validator.schema(schema.TerminateVnfRequest_V200, '2.0.0')
|
||||
@coordinate.lock_vnf_instance('{id}')
|
||||
def terminate(self, request, id, body):
|
||||
context = request.context
|
||||
inst = inst_utils.get_inst(context, id)
|
||||
|
||||
if inst.instantiationState != 'INSTANTIATED':
|
||||
raise sol_ex.VnfInstanceIsNotInstantiated(inst_id=id)
|
||||
|
||||
now = datetime.utcnow()
|
||||
lcmocc = objects.VnfLcmOpOccV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
operationState=v2fields.LcmOperationStateType.STARTING,
|
||||
stateEnteredTime=now,
|
||||
startTime=now,
|
||||
vnfInstanceId=id,
|
||||
operation=v2fields.LcmOperationType.TERMINATE,
|
||||
isAutomaticInvocation=False,
|
||||
isCancelPending=False,
|
||||
operationParams=body)
|
||||
|
||||
lcmocc.create(context)
|
||||
|
||||
self.conductor_rpc.start_lcm_op(context, lcmocc.id)
|
||||
|
||||
location = lcmocc_utils.lcmocc_href(lcmocc.id, self.endpoint)
|
||||
|
||||
return sol_wsgi.SolResponse(202, None, location=location)
|
||||
|
||||
@validator.schema(schema.LccnSubscriptionRequest_V200, '2.0.0')
|
||||
def subscription_create(self, request, body):
|
||||
context = request.context
|
||||
subsc = objects.LccnSubscriptionV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
callbackUri=body['callbackUri'],
|
||||
verbosity=body.get('verbosity', 'FULL') # default is 'FULL'
|
||||
)
|
||||
|
||||
if body.get('filter'):
|
||||
subsc.filter = (
|
||||
objects.LifecycleChangeNotificationsFilterV2.from_dict(
|
||||
body['filter'])
|
||||
)
|
||||
|
||||
auth_req = body.get('authentication')
|
||||
if auth_req:
|
||||
auth = objects.SubscriptionAuthentication(
|
||||
authType=auth_req['authType']
|
||||
)
|
||||
if 'BASIC' in auth.authType:
|
||||
basic_req = auth_req.get('paramsBasic')
|
||||
if basic_req is None:
|
||||
msg = "ParmasBasic must be specified."
|
||||
raise sol_ex.InvalidSubscription(sol_detail=msg)
|
||||
auth.paramsBasic = (
|
||||
objects.SubscriptionAuthentication_ParamsBasic(
|
||||
userName=basic_req.get('userName'),
|
||||
password=basic_req.get('password')
|
||||
)
|
||||
)
|
||||
|
||||
if 'OAUTH2_CLIENT_CREDENTIALS' in auth.authType:
|
||||
oauth2_req = auth_req.get('paramsOauth2ClientCredentials')
|
||||
if oauth2_req is None:
|
||||
msg = "paramsOauth2ClientCredentials must be specified."
|
||||
raise sol_ex.InvalidSubscription(sol_detail=msg)
|
||||
auth.paramsOauth2ClientCredentials = (
|
||||
objects.SubscriptionAuthentication_ParamsOauth2(
|
||||
clientId=oauth2_req.get('clientId'),
|
||||
clientPassword=oauth2_req.get('clientPassword'),
|
||||
tokenEndpoint=oauth2_req.get('tokenEndpoint')
|
||||
)
|
||||
)
|
||||
|
||||
if 'TLS_CERT' in auth.authType:
|
||||
msg = "'TLS_CERT' is not supported at the moment."
|
||||
raise sol_ex.InvalidSubscription(sol_detail=msg)
|
||||
|
||||
subsc.authentication = auth
|
||||
|
||||
if CONF.v2_nfvo.test_callback_uri:
|
||||
subsc_utils.test_notification(subsc)
|
||||
|
||||
subsc.create(context)
|
||||
|
||||
resp_body = self._subsc_view.detail(subsc)
|
||||
self_href = subsc_utils.subsc_href(subsc.id, self.endpoint)
|
||||
|
||||
return sol_wsgi.SolResponse(201, resp_body, location=self_href)
|
||||
|
||||
def subscription_list(self, request):
|
||||
filter_param = request.GET.get('filter')
|
||||
if filter_param is not None:
|
||||
filters = self._subsc_view.parse_filter(filter_param)
|
||||
else:
|
||||
filters = None
|
||||
|
||||
subscs = subsc_utils.get_subsc_all(request.context)
|
||||
|
||||
resp_body = self._subsc_view.detail_list(subscs, filters)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
def subscription_show(self, request, id):
|
||||
subsc = subsc_utils.get_subsc(request.context, id)
|
||||
|
||||
resp_body = self._subsc_view.detail(subsc)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
def subscription_delete(self, request, id):
|
||||
context = request.context
|
||||
subsc = subsc_utils.get_subsc(request.context, id)
|
||||
|
||||
subsc.delete(context)
|
||||
|
||||
return sol_wsgi.SolResponse(204, None)
|
||||
|
||||
def lcm_op_occ_list(self, request):
|
||||
filter_param = request.GET.get('filter')
|
||||
if filter_param is not None:
|
||||
filters = self._lcmocc_view.parse_filter(filter_param)
|
||||
else:
|
||||
filters = None
|
||||
|
||||
selector = self._lcmocc_view.parse_selector(request.GET)
|
||||
|
||||
lcmoccs = lcmocc_utils.get_lcmocc_all(request.context)
|
||||
|
||||
resp_body = self._lcmocc_view.detail_list(lcmoccs, filters, selector)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
def lcm_op_occ_show(self, request, id):
|
||||
lcmocc = lcmocc_utils.get_lcmocc(request.context, id)
|
||||
|
||||
resp_body = self._lcmocc_view.detail(lcmocc)
|
||||
|
||||
return sol_wsgi.SolResponse(200, resp_body)
|
||||
|
||||
def lcm_op_occ_delete(self, request, id):
|
||||
# not allowed to delete on the specification
|
||||
if not CONF.v2_vnfm.test_enable_lcm_op_occ_delete:
|
||||
raise sol_ex.MethodNotAllowed(method='DELETE')
|
||||
|
||||
# NOTE: This is for test use since it is inconvenient not to be
|
||||
# able to delete.
|
||||
context = request.context
|
||||
lcmocc = lcmocc_utils.get_lcmocc(context, id)
|
||||
|
||||
lcmocc.delete(context)
|
||||
|
||||
return sol_wsgi.SolResponse(204, None)
|
27
tacker/sol_refactored/controller/vnflcm_versions.py
Normal file
@ -0,0 +1,27 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from tacker.sol_refactored.api import api_version
|
||||
from tacker.sol_refactored.api import wsgi as sol_wsgi
|
||||
|
||||
|
||||
class VnfLcmVersionsController(sol_wsgi.SolAPIController):
|
||||
def index(self, request):
|
||||
api_versions = (api_version.supported_versions_v1['apiVersions'] +
|
||||
api_version.supported_versions_v2['apiVersions'])
|
||||
body = {"uriPrefix": "/vnflcm",
|
||||
"apiVersions": api_versions}
|
||||
return sol_wsgi.SolResponse(200, body)
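# Illustrative response body (the exact entries depend on
# api_version.supported_versions_v1/v2, so the values below are only an
# example):
#
#   {"uriPrefix": "/vnflcm",
#    "apiVersions": [{"version": "1.3.0", "isDeprecated": false},
#                    {"version": "2.0.0", "isDeprecated": false}]}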
|
366
tacker/sol_refactored/controller/vnflcm_view.py
Normal file
@ -0,0 +1,366 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from datetime import datetime
|
||||
import re
|
||||
|
||||
from dateutil import parser
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import subscription_utils as subsc_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FilterExpr(object):
|
||||
def __init__(self, op, attr, values):
|
||||
self.op = op
|
||||
self.attr = attr
|
||||
self.values = values
|
||||
|
||||
def match_eq(self, val):
|
||||
return val == self.values[0]
|
||||
|
||||
def match_neq(self, val):
|
||||
return val != self.values[0]
|
||||
|
||||
def match_in(self, val):
|
||||
return val in self.values
|
||||
|
||||
def match_nin(self, val):
|
||||
return val not in self.values
|
||||
|
||||
def match_gt(self, val):
|
||||
return val > self.values[0]
|
||||
|
||||
def match_gte(self, val):
|
||||
return val >= self.values[0]
|
||||
|
||||
def match_lt(self, val):
|
||||
return val < self.values[0]
|
||||
|
||||
def match_lte(self, val):
|
||||
return val <= self.values[0]
|
||||
|
||||
def match_cont(self, val):
|
||||
for v in self.values:
|
||||
if v in val:
|
||||
return True
|
||||
return False
|
||||
|
||||
def match_ncont(self, val):
|
||||
return not self.match_cont(val)
|
||||
|
||||
def match(self, val):
|
||||
try:
|
||||
for a in self.attr:
|
||||
# TODO(toshii): handle "@key"
|
||||
val = val[a]
|
||||
except KeyError:
|
||||
LOG.debug("Attr %s not found in %s", self.attr, val)
|
||||
return False
|
||||
LOG.debug("Key %s type %s", self.attr, type(val))
|
||||
# If not str, assume type conversion is already done.
|
||||
# Note: It is assumed that the type doesn't change between calls,
|
||||
# which can be problematic with KeyValuePairs.
|
||||
if isinstance(self.values[0], str):
|
||||
if isinstance(val, datetime):
|
||||
self.values[0] = parser.isoparse(self.values[0])
|
||||
elif isinstance(val, bool):
|
||||
self.values[0] = self.values[0].lower() == 'true'
|
||||
elif isinstance(val, int):
|
||||
self.values = [int(v) for v in self.values]
|
||||
elif isinstance(val, float):
|
||||
self.values = [float(v) for v in self.values]
|
||||
|
||||
return getattr(self, "match_" + self.op)(val)
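# Illustrative usage (not part of the module): a simpleFilterExpr such as
# (in,operationState,COMPLETED,FAILED_TEMP) becomes
#   FilterExpr('in', ['operationState'], ['COMPLETED', 'FAILED_TEMP'])
# and match() is then evaluated against a dict-like view of the object,
# e.g. f.match(lcmocc.to_dict()) is True if operationState is one of the
# given values.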
|
||||
|
||||
|
||||
class AttributeSelector(object):
|
||||
def __init__(self, default_exclude_list, all_fields=None, fields=None,
|
||||
exclude_fields=None, exclude_default=None):
|
||||
self.exclude_fields = []
|
||||
self.fields = []
|
||||
if all_fields is not None:
|
||||
if fields is not None or exclude_fields is not None or \
|
||||
exclude_default is not None:
|
||||
raise sol_ex.InvalidAttributeSelector()
|
||||
# Nothing to do
|
||||
elif fields is not None:
|
||||
if exclude_fields is not None:
|
||||
raise sol_ex.InvalidAttributeSelector()
|
||||
self.fields = fields.split(',')
|
||||
if exclude_default is not None:
|
||||
self.exclude_fields = [v for v in default_exclude_list
|
||||
if v not in self.fields]
|
||||
elif exclude_fields is not None:
|
||||
if exclude_default is not None:
|
||||
raise sol_ex.InvalidAttributeSelector()
|
||||
self.exclude_fields = exclude_fields.split(',')
|
||||
else:
|
||||
self.exclude_fields = default_exclude_list
|
||||
|
||||
def filter(self, obj, odict):
|
||||
deleted = {}
|
||||
if self.exclude_fields:
|
||||
excl_fields = self.exclude_fields
|
||||
else:
|
||||
if not self.fields:
|
||||
# Implies all_fields
|
||||
return odict
|
||||
excl_fields = [k for k in odict.keys() if k not in self.fields]
|
||||
|
||||
for k in excl_fields:
|
||||
klist = k.split('/')
|
||||
if len(klist) > 1:
|
||||
# TODO(toshii): check if this nested field is nullable
|
||||
pass
|
||||
else:
|
||||
if not obj.fields[klist[0]].nullable:
|
||||
continue
|
||||
val = odict
|
||||
deleted_ptr = deleted
|
||||
try:
|
||||
for i, k1 in enumerate(klist, start=1):
|
||||
if i == len(klist):
|
||||
deleted_ptr[k1] = val[k1]
|
||||
del val[k1]
|
||||
else:
|
||||
val = val[k1]
|
||||
if k1 not in deleted_ptr:
|
||||
deleted_ptr[k1] = {}
|
||||
deleted_ptr = deleted_ptr[k1]
|
||||
except KeyError:
|
||||
pass
|
||||
if not self.fields:
|
||||
return odict
|
||||
|
||||
# Re-add partial dictionary content
|
||||
for k in self.fields:
|
||||
klist = k.split('/')
|
||||
val = odict
|
||||
deleted_ptr = deleted
|
||||
try:
|
||||
for i, k1 in enumerate(klist, start=1):
|
||||
if i == len(klist):
|
||||
val[k1] = deleted_ptr[k1]
|
||||
else:
|
||||
if k1 not in val:
|
||||
val[k1] = {}
|
||||
val = val[k1]
|
||||
deleted_ptr = deleted_ptr[k1]
|
||||
except KeyError:
|
||||
LOG.debug("Key %s not found in %s or %s", k1, val, deleted_ptr)
|
||||
return odict
|
||||
|
||||
|
||||
class BaseViewBuilder(object):
|
||||
value_regexp = r"([^',)]+|('[^']*')+)"
|
||||
value_re = re.compile(value_regexp)
|
||||
simpleFilterExpr_re = re.compile(r"\(([a-z]+),([^,]+)(," +
|
||||
value_regexp + r")+\)")
|
||||
tildeEscape_re = re.compile(r"~([1ab])")
|
||||
opOne = ['eq', 'neq', 'gt', 'gte', 'lt', 'lte']
|
||||
opMulti = ['in', 'nin', 'cont', 'ncont']
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def parse_attr(self, attr):
|
||||
def tilde_unescape(string):
|
||||
def repl(m):
|
||||
if m.group(1) == '1':
|
||||
return '/'
|
||||
elif m.group(1) == 'a':
|
||||
return ','
|
||||
elif m.group(1) == 'b':
|
||||
return '@'
|
||||
|
||||
s1 = self.tildeEscape_re.sub(repl, string)
|
||||
return re.sub('~0', '~', s1)
|
||||
|
||||
attrs = attr.split('/')
|
||||
# TODO(toshii): handle "@key"
|
||||
return [tilde_unescape(a) for a in attrs]
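# Illustrative example of the SOL013 tilde escaping handled above:
# '~0' -> '~', '~1' -> '/', '~a' -> ',', '~b' -> '@', so an attribute
# name like "metadata/some~1key" is parsed into ['metadata', 'some/key'].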
|
||||
|
||||
def parse_values(self, values):
|
||||
loc = 0
|
||||
res = []
|
||||
while loc < len(values):
|
||||
if values[loc] != ",":
|
||||
LOG.debug("comma expected, %s at loc %d", values, loc)
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail=("value parse error. comma expected, %s" %
|
||||
values))
|
||||
loc += 1
|
||||
m = self.value_re.match(values[loc:])
|
||||
if m is None:
|
||||
LOG.debug("value parse error, %s at loc %d", values, loc)
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail="value parse error")
|
||||
loc += m.end()
|
||||
if m.group(0).startswith("'"):
|
||||
res.append(re.sub("''", "'", m.group(0)[1:-1]))
|
||||
else:
|
||||
res.append(m.group(0))
|
||||
return res
|
||||
|
||||
def parse_filter(self, filter):
|
||||
"""Implement SOL013 5.2 Attribute-based filtering"""
|
||||
|
||||
loc = 0
|
||||
res = []
|
||||
while True:
|
||||
m = self.simpleFilterExpr_re.match(filter[loc:])
|
||||
if m is None:
|
||||
LOG.debug("filter %s parse error at char %d", filter, loc)
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail="filter parse error")
|
||||
op = m.group(1)
|
||||
if op not in self.opOne and op not in self.opMulti:
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail=("Invalid op %s" % op))
|
||||
values = self.parse_values(
|
||||
filter[(loc + m.end(2)):(loc + m.end(3))])
|
||||
if len(values) > 1 and op not in self.opMulti:
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail=("Only one value is allowed for op %s" % op))
|
||||
res.append(FilterExpr(op, self.parse_attr(m.group(2)), values))
|
||||
loc += m.end()
|
||||
if loc == len(filter):
|
||||
return res
|
||||
if filter[loc] != ';':
|
||||
LOG.debug("filter %s parse error at char %d "
|
||||
"(semicolon expected)", filter, loc)
|
||||
raise sol_ex.InvalidAttributeFilter(
|
||||
sol_detail="filter parse error. semicolon expected.")
|
||||
loc += 1
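# Illustrative example (not part of the module): the filter string
#   "(eq,instantiationState,INSTANTIATED);(in,vnfProvider,'Company A','Company B')"
# is parsed into two FilterExpr objects, one per simpleFilterExpr, which
# are combined with AND semantics by match_filters().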
|
||||
|
||||
def parse_selector(self, req):
|
||||
"""Implement SOL013 5.3 Attribute selectors"""
|
||||
params = {}
|
||||
for k in ['all_fields', 'fields', 'exclude_fields', 'exclude_default']:
|
||||
v = req.get(k)
|
||||
if v is not None:
|
||||
params[k] = v
|
||||
return AttributeSelector(self._EXCLUDE_DEFAULT, **params)
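# Illustrative example (not part of the module): a request such as
#   GET .../vnf_instances?fields=instantiatedVnfInfo/vnfState,metadata
# yields AttributeSelector(fields=['instantiatedVnfInfo/vnfState', 'metadata']),
# and filter() then trims the response down to the selected attributes
# plus the non-nullable ones.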
|
||||
|
||||
def match_filters(self, val, filters):
|
||||
if filters is None:
|
||||
return True
|
||||
|
||||
for f in filters:
|
||||
if not f.match(val):
|
||||
return False
|
||||
return True
|
||||
|
||||
def detail_list(self, values, filters, selector):
|
||||
return [self.detail(v, selector) for v in values
|
||||
if self.match_filters(v, filters)]
|
||||
|
||||
|
||||
class InstanceViewBuilder(BaseViewBuilder):
|
||||
_EXCLUDE_DEFAULT = ['vnfConfigurableProperties',
|
||||
'vimConnectionInfo',
|
||||
'instantiatedVnfInfo',
|
||||
'metadata',
|
||||
'extensions']
|
||||
|
||||
def __init__(self, endpoint):
|
||||
self.endpoint = endpoint
|
||||
|
||||
def parse_filter(self, filter):
|
||||
return super().parse_filter(filter)
|
||||
|
||||
def detail(self, inst, selector=None):
|
||||
# NOTE: _links is not saved in DB. create when it is necessary.
|
||||
if not inst.obj_attr_is_set('_links'):
|
||||
inst._links = inst_utils.make_inst_links(inst, self.endpoint)
|
||||
|
||||
resp = inst.to_dict()
|
||||
|
||||
# remove password from vim_connection_info
|
||||
# see SOL003 4.4.1.6
|
||||
for vim_info in resp.get('vimConnectionInfo', {}).values():
|
||||
if ('accessInfo' in vim_info and
|
||||
'password' in vim_info['accessInfo']):
|
||||
vim_info['accessInfo'].pop('password')
|
||||
|
||||
if selector is not None:
|
||||
resp = selector.filter(inst, resp)
|
||||
return resp
|
||||
|
||||
def detail_list(self, insts, filters, selector):
|
||||
return super().detail_list(insts, filters, selector)
|
||||
|
||||
|
||||
class LcmOpOccViewBuilder(BaseViewBuilder):
|
||||
_EXCLUDE_DEFAULT = ['operationParams',
|
||||
'error',
|
||||
'resourceChanges',
|
||||
'changedInfo',
|
||||
'changedExtConnectivity']
|
||||
|
||||
def __init__(self, endpoint):
|
||||
self.endpoint = endpoint
|
||||
|
||||
def parse_filter(self, filter):
|
||||
return super().parse_filter(filter)
|
||||
|
||||
def detail(self, lcmocc, selector=None):
|
||||
# NOTE: _links is not saved in DB. create when it is necessary.
|
||||
if not lcmocc.obj_attr_is_set('_links'):
|
||||
lcmocc._links = lcmocc_utils.make_lcmocc_links(lcmocc,
|
||||
self.endpoint)
|
||||
resp = lcmocc.to_dict()
|
||||
if selector is not None:
|
||||
resp = selector.filter(lcmocc, resp)
|
||||
return resp
|
||||
|
||||
def detail_list(self, lcmoccs, filters, selector):
|
||||
return super().detail_list(lcmoccs, filters, selector)
|
||||
|
||||
|
||||
class SubscriptionViewBuilder(BaseViewBuilder):
|
||||
def __init__(self, endpoint):
|
||||
self.endpoint = endpoint
|
||||
|
||||
def parse_filter(self, filter):
|
||||
return super().parse_filter(filter)
|
||||
|
||||
def detail(self, subsc, selector=None):
|
||||
# NOTE: _links is not saved in DB. create when it is necessary.
|
||||
if not subsc.obj_attr_is_set('_links'):
|
||||
self_href = subsc_utils.subsc_href(subsc.id, self.endpoint)
|
||||
subsc._links = objects.LccnSubscriptionV2_Links()
|
||||
subsc._links.self = objects.Link(href=self_href)
|
||||
|
||||
resp = subsc.to_dict()
|
||||
|
||||
# NOTE: authentication is not included in LccnSubscription
|
||||
resp.pop('authentication', None)
|
||||
|
||||
if selector is not None:
|
||||
resp = selector.filter(subsc, resp)
|
||||
return resp
|
||||
|
||||
def detail_list(self, subscs, filters):
|
||||
return super().detail_list(subscs, filters, None)
|
0
tacker/sol_refactored/db/__init__.py
Normal file
24
tacker/sol_refactored/db/api.py
Normal file
@ -0,0 +1,24 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_db.sqlalchemy import enginefacade
|
||||
|
||||
|
||||
context_manager = enginefacade.transaction_context()
|
||||
|
||||
|
||||
def configure():
|
||||
context_manager.configure(sqlite_fk=True, **cfg.CONF.database)
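# Illustrative usage (assumed, following the usual oslo.db enginefacade
# pattern; the surrounding DB access code is not shown here):
#
#   with context_manager.reader.using(context) as session:
#       return session.query(...).all()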
|
0
tacker/sol_refactored/db/sqlalchemy/__init__.py
Normal file
100
tacker/sol_refactored/db/sqlalchemy/models.py
Normal file
@ -0,0 +1,100 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from tacker.db import model_base
|
||||
|
||||
|
||||
class LccnSubscriptionV2(model_base.BASE):
|
||||
"""Type: LccnSubscription
|
||||
|
||||
NFV-SOL 003
|
||||
- v3.3.1 5.5.2.16 (API version: 2.0.0)
|
||||
"""
|
||||
|
||||
__tablename__ = 'LccnSubscriptionV2'
|
||||
id = sa.Column(sa.String(255), nullable=False, primary_key=True)
|
||||
filter = sa.Column(sa.JSON(), nullable=True)
|
||||
callbackUri = sa.Column(sa.String(255), nullable=False)
|
||||
# NOTE: 'authentication' attribute is not included in the
|
||||
# original 'LccnSubscription' data type definition.
|
||||
authentication = sa.Column(sa.JSON(), nullable=True)
|
||||
verbosity = sa.Column(sa.Enum('FULL', 'SHORT', create_constraint=True,
|
||||
validate_strings=True), nullable=False)
|
||||
|
||||
|
||||
class VnfInstanceV2(model_base.BASE):
|
||||
"""Type: VnfInstance
|
||||
|
||||
NFV-SOL 003
|
||||
- v3.3.1 5.5.2.2 (API version: 2.0.0)
|
||||
"""
|
||||
|
||||
__tablename__ = 'VnfInstanceV2'
|
||||
id = sa.Column(sa.String(255), nullable=False, primary_key=True)
|
||||
vnfInstanceName = sa.Column(sa.String(255), nullable=True)
|
||||
vnfInstanceDescription = sa.Column(sa.Text(), nullable=True)
|
||||
vnfdId = sa.Column(sa.String(255), nullable=False)
|
||||
vnfProvider = sa.Column(sa.String(255), nullable=False)
|
||||
vnfProductName = sa.Column(sa.String(255), nullable=False)
|
||||
vnfSoftwareVersion = sa.Column(sa.String(255), nullable=False)
|
||||
vnfdVersion = sa.Column(sa.String(255), nullable=False)
|
||||
vnfConfigurableProperties = sa.Column(sa.JSON(), nullable=True)
|
||||
vimConnectionInfo = sa.Column(sa.JSON(), nullable=True)
|
||||
instantiationState = sa.Column(sa.Enum(
|
||||
'NOT_INSTANTIATED', 'INSTANTIATED', create_constraint=True,
|
||||
validate_strings=True), nullable=False)
|
||||
instantiatedVnfInfo = sa.Column(sa.JSON(), nullable=True)
|
||||
metadata__ = sa.Column("metadata", sa.JSON(), nullable=True)
|
||||
extensions = sa.Column(sa.JSON(), nullable=True)
|
||||
|
||||
|
||||
class VnfLcmOpOccV2(model_base.BASE):
|
||||
"""Type: VnfLcmOpOcc
|
||||
|
||||
NFV-SOL 003
|
||||
- v3.3.1 5.5.2.13 (API version: 2.0.0)
|
||||
"""
|
||||
|
||||
__tablename__ = 'VnfLcmOpOccV2'
|
||||
id = sa.Column(sa.String(255), nullable=False, primary_key=True)
|
||||
operationState = sa.Column(sa.Enum(
|
||||
'STARTING', 'PROCESSING', 'COMPLETED', 'FAILED_TEMP',
|
||||
'FAILED', 'ROLLING_BACK', 'ROLLED_BACK',
|
||||
create_constraint=True, validate_strings=True), nullable=False)
|
||||
stateEnteredTime = sa.Column(sa.DateTime(), nullable=False)
|
||||
startTime = sa.Column(sa.DateTime(), nullable=False)
|
||||
vnfInstanceId = sa.Column(sa.String(255), nullable=False)
|
||||
grantId = sa.Column(sa.String(255), nullable=True)
|
||||
operation = sa.Column(sa.Enum(
|
||||
'INSTANTIATE', 'SCALE', 'SCALE_TO_LEVEL', 'CHANGE_FLAVOUR',
|
||||
'TERMINATE', 'HEAL', 'OPERATE', 'CHANGE_EXT_CONN',
|
||||
'MODIFY_INFO', 'CREATE_SNAPSHOT', 'REVERT_TO_SNAPSHOT',
|
||||
'CHANGE_VNFPKG', create_constraint=True, validate_strings=True),
|
||||
nullable=False)
|
||||
isAutomaticInvocation = sa.Column(sa.Boolean, nullable=False)
|
||||
operationParams = sa.Column(sa.JSON(), nullable=True)
|
||||
isCancelPending = sa.Column(sa.Boolean, nullable=False)
|
||||
cancelMode = sa.Column(sa.Enum(
|
||||
'GRACEFUL', 'FORCEFUL', create_constraint=True, validate_strings=True),
|
||||
nullable=True)
|
||||
error = sa.Column(sa.JSON(), nullable=True)
|
||||
resourceChanges = sa.Column(sa.JSON(), nullable=True)
|
||||
changedInfo = sa.Column(sa.JSON(), nullable=True)
|
||||
changedExtConnectivity = sa.Column(sa.JSON(), nullable=True)
|
||||
modificationsTriggeredByVnfPkgChange = sa.Column(sa.JSON(), nullable=True)
|
||||
vnfSnapshotInfoId = sa.Column(sa.String(255), nullable=True)
|
130
tacker/sol_refactored/infra_drivers/openstack/heat_utils.py
Normal file
@ -0,0 +1,130 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_service import loopingcall
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import http_client
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
CHECK_INTERVAL = 5
|
||||
|
||||
|
||||
class HeatClient(object):
|
||||
|
||||
def __init__(self, vim_info):
|
||||
auth = http_client.KeystonePasswordAuthHandle(
|
||||
auth_url=vim_info.interfaceInfo['endpoint'],
|
||||
username=vim_info.accessInfo['username'],
|
||||
password=vim_info.accessInfo['password'],
|
||||
project_name=vim_info.accessInfo['project'],
|
||||
user_domain_name=vim_info.accessInfo['userDomain'],
|
||||
project_domain_name=vim_info.accessInfo['projectDomain']
|
||||
)
|
||||
self.client = http_client.HttpClient(auth,
|
||||
service_type='orchestration')
|
||||
|
||||
def create_stack(self, fields):
|
||||
path = "stacks"
|
||||
resp, body = self.client.do_request(path, "POST",
|
||||
expected_status=[201], body=fields)
|
||||
|
||||
def delete_stack(self, stack_name):
|
||||
path = "stacks/{}".format(stack_name)
|
||||
resp, body = self.client.do_request(path, "DELETE",
|
||||
expected_status=[204, 404])
|
||||
|
||||
def get_status(self, stack_name):
|
||||
path = "stacks/{}".format(stack_name)
|
||||
resp, body = self.client.do_request(path, "GET",
|
||||
expected_status=[200, 404])
|
||||
|
||||
if resp.status_code == 404:
|
||||
return None, None
|
||||
|
||||
return (body["stack"]["stack_status"],
|
||||
body["stack"]["stack_status_reason"])
|
||||
|
||||
def get_resources(self, stack_name):
|
||||
# NOTE: 'nested_depth=2' is specified because nested stack
# information also needs to be retrieved.
|
||||
path = "stacks/{}/resources?nested_depth=2".format(stack_name)
|
||||
resp, body = self.client.do_request(path, "GET",
|
||||
expected_status=[200])
|
||||
|
||||
return body['resources']
|
||||
|
||||
def _wait_completion(self, stack_name, operation, complete_status,
|
||||
progress_status, failed_status, none_is_done=False):
|
||||
# NOTE: A timeout is specified for each stack operation, so this
# does not loop forever.
|
||||
def _check_status():
|
||||
status, status_reason = self.get_status(stack_name)
|
||||
if (status == complete_status or
|
||||
(status is None and none_is_done)):
|
||||
LOG.info("%s %s done.", operation, stack_name)
|
||||
raise loopingcall.LoopingCallDone()
|
||||
elif status == failed_status:
|
||||
LOG.error("% %s failed.", operation, stack_name)
|
||||
sol_title = "%s failed" % operation
|
||||
raise sol_ex.StackOperationFailed(sol_title=sol_title,
|
||||
sol_detail=status_reason)
|
||||
elif status != progress_status:
|
||||
LOG.error("%s %s failed. status: %s", operation,
|
||||
stack_name, status)
|
||||
sol_title = "%s failed" % operation
|
||||
raise sol_ex.StackOperationFailed(sol_title=sol_title,
|
||||
sol_detail='Unknown error')
|
||||
LOG.debug("%s %s %s", operation, stack_name, progress_status)
|
||||
|
||||
timer = loopingcall.FixedIntervalLoopingCall(_check_status)
|
||||
timer.start(interval=CHECK_INTERVAL).wait()
|
||||
|
||||
def wait_stack_create(self, stack_name):
|
||||
self._wait_completion(stack_name, "Stack create",
|
||||
"CREATE_COMPLETE", "CREATE_IN_PROGRESS", "CREATE_FAILED")
|
||||
|
||||
def wait_stack_delete(self, stack_name):
|
||||
self._wait_completion(stack_name, "Stack delete",
|
||||
"DELETE_COMPLETE", "DELETE_IN_PROGRESS", "DELETE_FAILED",
|
||||
none_is_done=True)
|
||||
|
||||
|
||||
def get_reses_by_types(heat_reses, types):
|
||||
return [res for res in heat_reses if res['resource_type'] in types]
|
||||
|
||||
|
||||
def get_server_reses(heat_reses):
|
||||
return get_reses_by_types(heat_reses, ['OS::Nova::Server'])
|
||||
|
||||
|
||||
def get_network_reses(heat_reses):
|
||||
return get_reses_by_types(heat_reses, ['OS::Neutron::Net'])
|
||||
|
||||
|
||||
def get_storage_reses(heat_reses):
|
||||
return get_reses_by_types(heat_reses, ['OS::Cinder::Volume'])
|
||||
|
||||
|
||||
def get_port_reses(heat_reses):
|
||||
return get_reses_by_types(heat_reses, ['OS::Neutron::Port'])
|
||||
|
||||
|
||||
def get_stack_name(inst):
|
||||
return "vnf-" + inst.id
|
535
tacker/sol_refactored/infra_drivers/openstack/openstack.py
Normal file
@ -0,0 +1,535 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import eventlet
|
||||
import os
|
||||
import pickle
|
||||
import subprocess
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.infra_drivers.openstack import heat_utils
|
||||
from tacker.sol_refactored.infra_drivers.openstack import userdata_default
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
class Openstack(object):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def instantiate(self, req, inst, grant_req, grant, vnfd):
|
||||
# make HOT
|
||||
fields = self.make_hot(req, inst, grant_req, grant, vnfd)
|
||||
|
||||
LOG.debug("stack fields: %s", fields)
|
||||
|
||||
# create stack
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
heat_client = heat_utils.HeatClient(vim_info)
|
||||
heat_client.create_stack(fields)
|
||||
|
||||
# wait stack created
|
||||
stack_name = fields['stack_name']
|
||||
heat_client.wait_stack_create(stack_name)
|
||||
|
||||
# get stack resource
|
||||
heat_reses = heat_client.get_resources(stack_name)
|
||||
|
||||
# make instantiated_vnf_info
|
||||
self.make_instantiated_vnf_info(req, inst, grant, vnfd, heat_reses)
|
||||
|
||||
def make_hot(self, req, inst, grant_req, grant, vnfd):
|
||||
flavour_id = req.flavourId
|
||||
hot_dict = vnfd.get_base_hot(flavour_id)
|
||||
if not hot_dict:
|
||||
raise sol_ex.BaseHOTNotDefined()
|
||||
|
||||
userdata = None
|
||||
userdata_class = None
|
||||
if req.obj_attr_is_set('additionalParams'):
|
||||
userdata = req.additionalParams.get('lcm-operation-user-data')
|
||||
userdata_class = req.additionalParams.get(
|
||||
'lcm-operation-user-data-class')
|
||||
|
||||
if userdata is None and userdata_class is None:
|
||||
LOG.debug("Processing default userdata instantiate")
|
||||
# NOTE: objects used here are dict compat.
|
||||
fields = userdata_default.DefaultUserData.instantiate(
|
||||
req, inst, grant_req, grant, vnfd.csar_dir)
|
||||
elif userdata is None or userdata_class is None:
|
||||
# Both must be specified.
|
||||
raise sol_ex.UserdataMissing()
|
||||
else:
|
||||
LOG.debug("Processing %s %s instantiate", userdata, userdata_class)
|
||||
|
||||
tmp_csar_dir = vnfd.make_tmp_csar_dir()
|
||||
script_dict = {
|
||||
'request': req.to_dict(),
|
||||
'vnf_instance': inst.to_dict(),
|
||||
'grant_request': grant_req.to_dict(),
|
||||
'grant_response': grant.to_dict(),
|
||||
'tmp_csar_dir': tmp_csar_dir
|
||||
}
|
||||
script_path = os.path.join(
|
||||
os.path.dirname(__file__), "userdata_main.py")
|
||||
|
||||
out = subprocess.run(["python3", script_path, "INSTANTIATE"],
|
||||
input=pickle.dumps(script_dict),
|
||||
capture_output=True)
|
||||
|
||||
vnfd.remove_tmp_csar_dir(tmp_csar_dir)
|
||||
|
||||
if out.returncode != 0:
|
||||
LOG.debug("execute userdata class instantiate failed: %s",
|
||||
out.stderr)
|
||||
raise sol_ex.UserdataExecutionFailed(sol_detail=out.stderr)
|
||||
|
||||
fields = pickle.loads(out.stdout)
|
||||
|
||||
stack_name = heat_utils.get_stack_name(inst)
|
||||
fields['stack_name'] = stack_name
|
||||
fields['timeout_mins'] = (
|
||||
CONF.v2_vnfm.openstack_vim_stack_create_timeout)
|
||||
|
||||
return fields
|
||||
|
||||
def _address_range_data_to_info(self, range_data):
|
||||
obj = objects.ipOverEthernetAddressInfoV2_IpAddresses_AddressRange()
|
||||
obj.minAddress = range_data.minAddress
|
||||
obj.maxAddress = range_data.maxAddress
|
||||
return obj
|
||||
|
||||
def _proto_data_to_info(self, proto_data):
|
||||
# make CpProtocolInfo (5.5.3.9b) from CpProtocolData (4.4.1.10b)
|
||||
proto_info = objects.CpProtocolInfoV2(
|
||||
layerProtocol=proto_data.layerProtocol
|
||||
)
|
||||
ip_info = objects.IpOverEthernetAddressInfoV2()
|
||||
|
||||
ip_data = proto_data.ipOverEthernet
|
||||
if ip_data.obj_attr_is_set('macAddress'):
|
||||
ip_info.macAddress = ip_data.macAddress
|
||||
if ip_data.obj_attr_is_set('segmentationId'):
|
||||
ip_info.segmentationId = ip_data.segmentationId
|
||||
if ip_data.obj_attr_is_set('ipAddresses'):
|
||||
addr_infos = []
|
||||
for addr_data in ip_data.ipAddresses:
|
||||
addr_info = objects.IpOverEthernetAddressInfoV2_IpAddresses(
|
||||
type=addr_data.type)
|
||||
if addr_data.obj_attr_is_set('fixedAddresses'):
|
||||
addr_info.addresses = addr_data.fixedAddresses
|
||||
if addr_data.obj_attr_is_set('numDynamicAddresses'):
|
||||
addr_info.isDynamic = True
|
||||
if addr_data.obj_attr_is_set('addressRange'):
|
||||
addr_info.addressRange = self._address_range_data_to_info(
|
||||
addr_data.addressRange)
|
||||
if addr_data.obj_attr_is_set('subnetId'):
|
||||
addr_info.subnetId = addr_data.subnetId
|
||||
addr_infos.append(addr_info)
|
||||
ip_info.ipAddresses = addr_infos
|
||||
|
||||
proto_info.ipOverEthernet = ip_info
|
||||
|
||||
return proto_info
|
||||
|
||||
def make_instantiated_vnf_info(self, req, inst, grant, vnfd, heat_reses):
|
||||
flavour_id = req.flavourId
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
inst_vnf_info = objects.VnfInstanceV2_InstantiatedVnfInfo(
|
||||
flavourId=flavour_id,
|
||||
vnfState='STARTED',
|
||||
)
|
||||
|
||||
# make virtualStorageResourceInfo
|
||||
storages = vnfd.get_storage_nodes(flavour_id)
|
||||
reses = heat_utils.get_storage_reses(heat_reses)
|
||||
storage_infos = []
|
||||
storage_info_to_heat_res = {}
|
||||
|
||||
for res in reses:
|
||||
storage_name = res['resource_name']
|
||||
if storage_name not in list(storages.keys()):
|
||||
# should not occur; just a consistency check.
|
||||
LOG.debug("%s not in VNFD storage definition.", storage_name)
|
||||
continue
|
||||
storage_info = objects.VirtualStorageResourceInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
virtualStorageDescId=storage_name,
|
||||
storageResource=objects.ResourceHandle(
|
||||
resourceId=res['physical_resource_id'],
|
||||
vimLevelResourceType=res['resource_type'],
|
||||
vimConnectionId=vim_info.vimId,
|
||||
)
|
||||
)
|
||||
storage_infos.append(storage_info)
|
||||
storage_info_to_heat_res[storage_info.id] = res
|
||||
|
||||
if storage_infos:
|
||||
inst_vnf_info.virtualStorageResourceInfo = storage_infos
|
||||
|
||||
# make vnfcResourceInfo
|
||||
vdus = vnfd.get_vdu_nodes(flavour_id)
|
||||
reses = heat_utils.get_server_reses(heat_reses)
|
||||
vnfc_res_infos = []
|
||||
vnfc_res_info_to_heat_res = {}
|
||||
|
||||
for res in reses:
|
||||
vdu_name = res['resource_name']
|
||||
if vdu_name not in list(vdus.keys()):
|
||||
# should not occur; just a consistency check.
|
||||
LOG.debug("%s not in VNFD VDU definition.", vdu_name)
|
||||
continue
|
||||
vnfc_res_info = objects.VnfcResourceInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
vduId=vdu_name,
|
||||
computeResource=objects.ResourceHandle(
|
||||
resourceId=res['physical_resource_id'],
|
||||
vimLevelResourceType=res['resource_type'],
|
||||
vimConnectionId=vim_info.vimId,
|
||||
),
|
||||
)
|
||||
vdu_cps = vnfd.get_vdu_cps(flavour_id, vdu_name)
|
||||
cp_infos = []
|
||||
for cp in vdu_cps:
|
||||
cp_info = objects.VnfcResourceInfoV2_VnfcCpInfo(
|
||||
id=uuidutils.generate_uuid(),
|
||||
cpdId=cp,
|
||||
# vnfExtCpId or vnfLinkPortId may be set later
|
||||
)
|
||||
cp_infos.append(cp_info)
|
||||
if cp_infos:
|
||||
vnfc_res_info.vnfcCpInfo = cp_infos
|
||||
|
||||
# find storages used by this
|
||||
storage_ids = []
|
||||
for storage_id, storage_res in storage_info_to_heat_res.items():
|
||||
if (vdu_name in storage_res.get('required_by', []) and
|
||||
res.get('parent_resource') ==
|
||||
storage_res.get('parent_resource')):
|
||||
storage_ids.append(storage_id)
|
||||
if storage_ids:
|
||||
vnfc_res_info.storageResourceIds = storage_ids
|
||||
|
||||
vnfc_res_infos.append(vnfc_res_info)
|
||||
vnfc_res_info_to_heat_res[vnfc_res_info.id] = res
|
||||
|
||||
if vnfc_res_infos:
|
||||
inst_vnf_info.vnfcResourceInfo = vnfc_res_infos
|
||||
|
||||
# make vnfVirtualLinkResourceInfo
|
||||
vls = vnfd.get_virtual_link_nodes(flavour_id)
|
||||
reses = heat_utils.get_network_reses(heat_reses)
|
||||
vnf_vl_infos = []
|
||||
vnf_vl_info_to_heat_res = {}
|
||||
|
||||
for res in reses:
|
||||
vl_name = res['resource_name']
|
||||
if vl_name not in list(vls.keys()):
|
||||
# should not occur; just a consistency check.
|
||||
LOG.debug("%s not in VNFD VL definition.", vl_name)
|
||||
continue
|
||||
vnf_vl_info = objects.VnfVirtualLinkResourceInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
vnfVirtualLinkDescId=vl_name,
|
||||
networkResource=objects.ResourceHandle(
|
||||
resourceId=res['physical_resource_id'],
|
||||
vimLevelResourceType=res['resource_type'],
|
||||
vimConnectionId=vim_info.vimId,
|
||||
),
|
||||
# vnfLinkPorts set later
|
||||
)
|
||||
vnf_vl_infos.append(vnf_vl_info)
|
||||
vnf_vl_info_to_heat_res[vnf_vl_info.id] = res
|
||||
|
||||
if vnf_vl_infos:
|
||||
inst_vnf_info.vnfVirtualLinkResourceInfo = vnf_vl_infos
|
||||
|
||||
# make extVirtualLinkInfo
|
||||
ext_vls = []
|
||||
req_ext_vls = []
|
||||
ext_cp_infos = []
|
||||
if grant.obj_attr_is_set('extVirtualLinks'):
|
||||
req_ext_vls = grant.extVirtualLinks
|
||||
elif req.obj_attr_is_set('extVirtualLinks'):
|
||||
req_ext_vls = req.extVirtualLinks
|
||||
|
||||
for req_ext_vl in req_ext_vls:
|
||||
ext_vl = objects.ExtVirtualLinkInfoV2(
|
||||
id=req_ext_vl.id,
|
||||
resourceHandle=objects.ResourceHandle(
|
||||
id=uuidutils.generate_uuid(),
|
||||
resourceId=req_ext_vl.resourceId
|
||||
),
|
||||
currentVnfExtCpData=req_ext_vl.extCps
|
||||
)
|
||||
if req_ext_vl.obj_attr_is_set('vimConnectionId'):
|
||||
ext_vl.resourceHandle.vimConnectionId = (
|
||||
req_ext_vl.vimConnectionId)
|
||||
if req_ext_vl.obj_attr_is_set('resourceProviderId'):
|
||||
ext_vl.resourceHandle.resourceProviderId = (
|
||||
req_ext_vl.resourceProviderId)
|
||||
|
||||
ext_vls.append(ext_vl)
|
||||
|
||||
if not req_ext_vl.obj_attr_is_set('extLinkPorts'):
|
||||
continue
|
||||
link_ports = []
|
||||
for req_link_port in req_ext_vl.extLinkPorts:
|
||||
link_port = objects.ExtLinkPortInfoV2(
|
||||
id=req_link_port.id,
|
||||
resourceHandle=req_link_port.resourceHandle,
|
||||
)
|
||||
ext_cp_info = objects.VnfExtCpInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
extLinkPortId=link_port.id
|
||||
# associatedVnfcCpId may be set later
|
||||
)
|
||||
link_port.cpInstanceId = ext_cp_info.id
|
||||
|
||||
for ext_cp in req_ext_vl.extCps:
|
||||
found = False
|
||||
for key, cp_conf in ext_cp.cpConfig.items():
|
||||
if (cp_conf.obj_attr_is_set('linkPortId') and
|
||||
cp_conf.linkPortId == link_port.id):
|
||||
ext_cp_info.cpdId = ext_cp.cpdId
|
||||
ext_cp_info.cpConfigId = key
|
||||
# NOTE: cpProtocolInfo can't be filled
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
break
|
||||
|
||||
link_ports.append(link_port)
|
||||
ext_cp_infos.append(ext_cp_info)
|
||||
|
||||
ext_vl.extLinkPorts = link_ports
|
||||
|
||||
if ext_vls:
|
||||
inst_vnf_info.extVirtualLinkInfo = ext_vls
|
||||
# ext_cp_infos set later
|
||||
|
||||
# make extManagedVirtualLinkInfo
|
||||
ext_mgd_vls = []
|
||||
req_mgd_vls = []
|
||||
if grant.obj_attr_is_set('extManagedVirtualLinks'):
|
||||
req_mgd_vls = grant.extManagedVirtualLinks
|
||||
elif req.obj_attr_is_set('extManagedVirtualLinks'):
|
||||
req_mgd_vls = req.extManagedVirtualLinks
|
||||
|
||||
for req_mgd_vl in req_mgd_vls:
|
||||
ext_mgd_vl = objects.ExtManagedVirtualLinkInfoV2(
|
||||
id=req_mgd_vl.id,
|
||||
vnfVirtualLinkDescId=req_mgd_vl.vnfVirtualLinkDescId,
|
||||
networkResource=objects.ResourceHandle(
|
||||
id=uuidutils.generate_uuid(),
|
||||
resourceId=req_mgd_vl.resourceId
|
||||
),
|
||||
)
|
||||
if req_mgd_vl.obj_attr_is_set('vimConnectionId'):
|
||||
ext_mgd_vl.networkResource.vimConnectionId = (
|
||||
req_mgd_vl.vimConnectionId)
|
||||
if req_mgd_vl.obj_attr_is_set('resourceProviderId'):
|
||||
ext_mgd_vl.networkResource.resourceProviderId = (
|
||||
req_mgd_vl.resourceProviderId)
|
||||
|
||||
ext_mgd_vls.append(ext_mgd_vl)
|
||||
|
||||
if not req_mgd_vl.obj_attr_is_set('vnfLinkPort'):
|
||||
continue
|
||||
link_ports = []
|
||||
for req_link_port in req_mgd_vl.vnfLinkPort:
|
||||
link_port = objects.VnfLinkPortInfoV2(
|
||||
id=req_link_port.vnfLinkPortId,
|
||||
resourceHandle=req_link_port.resourceHandle,
|
||||
cpInstanceType='EXT_CP', # may be changed later
|
||||
# cpInstanceId may be set later
|
||||
)
|
||||
link_ports.append(link_port)
|
||||
ext_mgd_vl.vnfLinkPort = link_ports
|
||||
|
||||
if ext_mgd_vls:
|
||||
inst_vnf_info.extManagedVirtualLinkInfo = ext_mgd_vls
|
||||
|
||||
# make CP related infos
|
||||
vdu_cps = vnfd.get_vducp_nodes(flavour_id)
|
||||
reses = heat_utils.get_port_reses(heat_reses)
|
||||
|
||||
for res in reses:
|
||||
cp_name = res['resource_name']
|
||||
if cp_name not in list(vdu_cps.keys()):
|
||||
# should not occur; just a consistency check.
|
||||
LOG.debug("%s not in VNFD CP definition.", cp_name)
|
||||
continue
|
||||
vl_name = vnfd.get_vl_name_from_cp(flavour_id, vdu_cps[cp_name])
|
||||
is_external = False
|
||||
if vl_name is None: # extVirtualLink
|
||||
is_external = True
|
||||
|
||||
# NOTE: this object is different from the other vl types
|
||||
vnf_link_port = objects.ExtLinkPortInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
resourceHandle=objects.ResourceHandle(
|
||||
resourceId=res['physical_resource_id'],
|
||||
vimLevelResourceType=res['resource_type'],
|
||||
vimConnectionId=vim_info.vimId,
|
||||
)
|
||||
)
|
||||
ext_cp_info = objects.VnfExtCpInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
extLinkPortId=vnf_link_port.id,
|
||||
cpdId=cp_name
|
||||
# associatedVnfcCpId may be set later
|
||||
)
|
||||
vnf_link_port.cpInstanceId = ext_cp_info.id
|
||||
|
||||
found = False
|
||||
for ext_vl in ext_vls:
|
||||
for ext_cp in ext_vl.currentVnfExtCpData:
|
||||
if ext_cp.cpdId == cp_name:
|
||||
found = True
|
||||
break
|
||||
if found:
|
||||
break
|
||||
|
||||
if found:
|
||||
if ext_vl.obj_attr_is_set('extLinkPorts'):
|
||||
ext_vl.extLinkPorts.append(vnf_link_port)
|
||||
else:
|
||||
ext_vl.extLinkPorts = [vnf_link_port]
|
||||
|
||||
for key, cp_conf in ext_cp.cpConfig.items():
|
||||
# NOTE: it is assumed that there is one item
|
||||
# (with cpProtocolData) of cpConfig at the moment.
|
||||
if cp_conf.obj_attr_is_set('cpProtocolData'):
|
||||
proto_infos = []
|
||||
for proto_data in cp_conf.cpProtocolData:
|
||||
proto_info = self._proto_data_to_info(
|
||||
proto_data)
|
||||
proto_infos.append(proto_info)
|
||||
ext_cp_info.cpProtocolInfo = proto_infos
|
||||
ext_cp_info.cpConfigId = key
|
||||
break
|
||||
|
||||
ext_cp_infos.append(ext_cp_info)
|
||||
else:
|
||||
# Internal VL or extManagedVirtualLink
|
||||
vnf_link_port = objects.VnfLinkPortInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
resourceHandle=objects.ResourceHandle(
|
||||
resourceId=res['physical_resource_id'],
|
||||
vimLevelResourceType=res['resource_type'],
|
||||
vimConnectionId=vim_info.vimId,
|
||||
cpInstanceType='EXT_CP' # may be changed later
|
||||
)
|
||||
)
|
||||
|
||||
is_internal = False
|
||||
for vnf_vl_info in vnf_vl_infos:
|
||||
if vnf_vl_info.vnfVirtualLinkDescId == vl_name:
|
||||
# Internal VL
|
||||
is_internal = True
|
||||
if vnf_vl_info.obj_attr_is_set('vnfLinkPorts'):
|
||||
vnf_vl_info.vnfLinkPorts.append(vnf_link_port)
|
||||
else:
|
||||
vnf_vl_info.vnfLinkPorts = [vnf_link_port]
|
||||
|
||||
if not is_internal:
|
||||
# extManagedVirtualLink
|
||||
for ext_mgd_vl in ext_mgd_vls:
|
||||
# should be found
|
||||
if ext_mgd_vl.vnfVirtualLinkDescId == vl_name:
|
||||
if ext_mgd_vl.obj_attr_is_set('vnfLinkPorts'):
|
||||
ext_mgd_vl.vnfLinkPorts.append(vnf_link_port)
|
||||
else:
|
||||
ext_mgd_vl.vnfLinkPorts = [vnf_link_port]
|
||||
|
||||
# link to vnfcResourceInfo.vnfcCpInfo
|
||||
for vnfc_res_info in vnfc_res_infos:
|
||||
if not vnfc_res_info.obj_attr_is_set('vnfcCpInfo'):
|
||||
continue
|
||||
vnfc_res = vnfc_res_info_to_heat_res[vnfc_res_info.id]
|
||||
vdu_name = vnfc_res_info.vduId
|
||||
if not (vdu_name in res.get('required_by', []) and
|
||||
res.get('parent_resource') ==
|
||||
vnfc_res.get('parent_resource')):
|
||||
continue
|
||||
for vnfc_cp in vnfc_res_info.vnfcCpInfo:
|
||||
if vnfc_cp.cpdId != cp_name:
|
||||
continue
|
||||
if is_external:
|
||||
vnfc_cp.vnfExtCpId = vnf_link_port.cpInstanceId
|
||||
for ext_cp_info in ext_cp_infos:
|
||||
if ext_cp_info.extLinkPortId == vnf_link_port.id:
|
||||
ext_cp_info.associatedVnfcCpId = vnfc_cp.id
|
||||
break
|
||||
else:
|
||||
vnf_link_port.cpInstanceType = 'VNFC_CP'
|
||||
vnf_link_port.cpInstanceId = vnfc_cp.id
|
||||
vnfc_cp.vnfLinkPortId = vnf_link_port.id
|
||||
break
|
||||
|
||||
if ext_cp_infos:
|
||||
inst_vnf_info.extCpInfo = ext_cp_infos
|
||||
|
||||
# NOTE: The followings are not handled at the moment.
|
||||
# - handle tosca.nodes.nfv.VnfExtCp type
|
||||
# Note that there is no example in current tacker examples which use
|
||||
# tosca.nodes.nfv.VnfExtCp type and related BaseHOT definitions.
|
||||
# - in the case of specifying linkPortId of extVirtualLinks or
|
||||
# extManagedVirtualLinks, the link of vnfcCpInfo is not handled
|
||||
# because the association of compute resource and port resource
|
||||
# is not identified.
|
||||
|
||||
# make vnfcInfo
|
||||
# NOTE: vnfcInfo only exists in SOL002
|
||||
vnfc_infos = []
|
||||
for vnfc_res_info in vnfc_res_infos:
|
||||
vnfc_info = objects.VnfcInfoV2(
|
||||
id=uuidutils.generate_uuid(),
|
||||
vduId=vnfc_res_info.vduId,
|
||||
vnfcResourceInfoId=vnfc_res_info.id,
|
||||
vnfcState='STARTED'
|
||||
)
|
||||
vnfc_infos.append(vnfc_info)
|
||||
|
||||
if vnfc_infos:
|
||||
inst_vnf_info.vnfcInfo = vnfc_infos
|
||||
|
||||
inst.instantiatedVnfInfo = inst_vnf_info
|
||||
|
||||
def terminate(self, req, inst, grant_req, grant, vnfd):
|
||||
if req.terminationType == 'GRACEFUL':
|
||||
timeout = CONF.v2_vnfm.default_graceful_termination_timeout
|
||||
if req.obj_attr_is_set('gracefulTerminationTimeout'):
|
||||
timeout = req.gracefulTerminationTimeout
|
||||
eventlet.sleep(timeout)
|
||||
|
||||
# delete stack
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
heat_client = heat_utils.HeatClient(vim_info)
|
||||
stack_name = heat_utils.get_stack_name(inst)
|
||||
heat_client.delete_stack(stack_name)
|
||||
heat_client.wait_stack_delete(stack_name)
|
@ -0,0 +1,86 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import yaml
|
||||
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.infra_drivers.openstack import userdata_utils
|
||||
|
||||
|
||||
class DefaultUserData(userdata_utils.AbstractUserData):
|
||||
|
||||
@staticmethod
|
||||
def instantiate(req, inst, grant_req, grant, tmp_csar_dir):
|
||||
vnfd = userdata_utils.get_vnfd(inst['vnfdId'], tmp_csar_dir)
|
||||
flavour_id = req['flavourId']
|
||||
|
||||
hot_dict = vnfd.get_base_hot(flavour_id)
|
||||
top_hot = hot_dict['template']
|
||||
|
||||
nfv_dict = userdata_utils.init_nfv_dict(top_hot)
|
||||
|
||||
vdus = nfv_dict.get('VDU', {})
|
||||
for vdu_name, vdu_value in vdus.items():
|
||||
if 'computeFlavourId' in vdu_value:
|
||||
vdu_value['computeFlavourId'] = (
|
||||
userdata_utils.get_param_flavor(
|
||||
vdu_name, req, vnfd, grant))
|
||||
if 'vcImageId' in vdu_value:
|
||||
vdu_value['vcImageId'] = userdata_utils.get_param_image(
|
||||
vdu_name, req, vnfd, grant)
|
||||
if 'locationConstraints' in vdu_value:
|
||||
vdu_value['locationConstraints'] = (
|
||||
userdata_utils.get_param_zone(
|
||||
vdu_name, grant_req, grant))
|
||||
|
||||
cps = nfv_dict.get('CP', {})
|
||||
for cp_name, cp_value in cps.items():
|
||||
if 'network' in cp_value:
|
||||
cp_value['network'] = userdata_utils.get_param_network(
|
||||
cp_name, grant, req)
|
||||
if 'fixed_ips' in cp_value:
|
||||
ext_fixed_ips = userdata_utils.get_param_fixed_ips(
|
||||
cp_name, grant, req)
|
||||
fixed_ips = []
|
||||
for i in range(len(ext_fixed_ips)):
|
||||
if i not in cp_value['fixed_ips']:
|
||||
break
|
||||
ips_i = cp_value['fixed_ips'][i]
|
||||
if 'subnet' in ips_i:
|
||||
ips_i['subnet'] = ext_fixed_ips[i].get('subnet')
|
||||
if 'ip_address' in ips_i:
|
||||
ips_i['ip_address'] = ext_fixed_ips[i].get(
|
||||
'ip_address')
|
||||
fixed_ips.append(ips_i)
|
||||
cp_value['fixed_ips'] = fixed_ips
|
||||
|
||||
userdata_utils.apply_ext_managed_vls(top_hot, req, grant)
|
||||
|
||||
if 'nfv' in req.get('additionalParams', {}):
|
||||
nfv_dict = inst_utils.json_merge_patch(nfv_dict,
|
||||
req['additionalParams']['nfv'])
|
||||
if 'nfv' in grant.get('additionalParams', {}):
|
||||
nfv_dict = inst_utils.json_merge_patch(nfv_dict,
|
||||
grant['additionalParams']['nfv'])
|
||||
|
||||
fields = {
|
||||
'template': yaml.safe_dump(top_hot),
|
||||
'parameters': {'nfv': nfv_dict},
|
||||
'files': {}
|
||||
}
|
||||
for key, value in hot_dict.get('files', {}).items():
|
||||
fields['files'][key] = yaml.safe_dump(value)
|
||||
|
||||
return fields
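Because the generated 'nfv' parameter is JSON-merge-patched with any 'nfv' value found in additionalParams (request first, then grant), individual parameters can be overridden without providing a custom user-data script. A hypothetical request fragment (the VDU name and flavor value are illustrative):

    additional_params = {
        'nfv': {
            'VDU': {
                'VDU1': {'computeFlavourId': 'm1.large'}  # override only the flavor
            }
        }
    }
    # inst_utils.json_merge_patch(nfv_dict, additional_params['nfv']) keeps all
    # other generated values (images, networks, fixed_ips) untouched.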
|
@ -0,0 +1,59 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import importlib
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
|
||||
def main(operation):
|
||||
script_dict = pickle.load(sys.stdin.buffer)
|
||||
|
||||
req = script_dict['request']
|
||||
inst = script_dict['vnf_instance']
|
||||
grant_req = script_dict['grant_request']
|
||||
grant = script_dict['grant_response']
|
||||
tmp_csar_dir = script_dict['tmp_csar_dir']
|
||||
|
||||
additional_params = req['additionalParams']
|
||||
userdata_path = additional_params['lcm-operation-user-data']
|
||||
userdata_class = additional_params['lcm-operation-user-data-class']
|
||||
|
||||
sys.path.append(tmp_csar_dir)
|
||||
class_module = os.path.splitext(
|
||||
userdata_path.lstrip('./'))[0].replace('/', '.')
|
||||
module = importlib.import_module(class_module)
|
||||
klass = getattr(module, userdata_class)
|
||||
|
||||
if operation == 'INSTANTIATE':
|
||||
stack_dict = klass.instantiate(
|
||||
req, inst, grant_req, grant, tmp_csar_dir)
|
||||
else:
|
||||
raise Exception("Unknown operation")
|
||||
|
||||
pickle.dump(stack_dict, sys.stdout.buffer)
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main(sys.argv[1])
|
||||
os._exit(0)
|
||||
except Exception:
|
||||
sys.stderr.write(traceback.format_exc())
|
||||
sys.stderr.flush()
|
||||
os._exit(1)
|
216
tacker/sol_refactored/infra_drivers/openstack/userdata_utils.py
Normal file
@ -0,0 +1,216 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import abc
|
||||
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import vnfd_utils
|
||||
|
||||
|
||||
class AbstractUserData(metaclass=abc.ABCMeta):
|
||||
|
||||
@staticmethod
|
||||
@abc.abstractmethod
|
||||
def instantiate(req, inst, grant_req, grant, tmp_csar_dir):
|
||||
"""Definition of instantiate method
|
||||
|
||||
Args:
|
||||
req: InstantiateVnfRequest dict
|
||||
inst: VnfInstance dict
|
||||
grant_req: GrantRequest dict
|
||||
grant: Grant dict
|
||||
tmp_csar_dir: directory path where the csar contents are extracted
|
||||
|
||||
Returns:
|
||||
dict of parameters for heat stack creation.
see the example in userdata_default.py.
|
||||
"""
|
||||
raise sol_ex.UserDataClassNotImplemented()
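A user-data class shipped in a VNF package (referenced by the 'lcm-operation-user-data' and 'lcm-operation-user-data-class' additionalParams) is expected to subclass AbstractUserData. A minimal sketch of such a class, assuming it only fills compute flavors and reuses the helper functions defined later in this module (in a real package they would be imported from tacker.sol_refactored.infra_drivers.openstack.userdata_utils):

    import yaml

    class MyUserData(AbstractUserData):

        @staticmethod
        def instantiate(req, inst, grant_req, grant, tmp_csar_dir):
            vnfd = get_vnfd(inst['vnfdId'], tmp_csar_dir)
            hot_dict = vnfd.get_base_hot(req['flavourId'])
            top_hot = hot_dict['template']

            # build the 'nfv' parameter skeleton and fill in the flavors only
            nfv_dict = init_nfv_dict(top_hot)
            for vdu_name, vdu_value in nfv_dict.get('VDU', {}).items():
                if 'computeFlavourId' in vdu_value:
                    vdu_value['computeFlavourId'] = get_param_flavor(
                        vdu_name, req, vnfd, grant)

            return {
                'template': yaml.safe_dump(top_hot),
                'parameters': {'nfv': nfv_dict},
                'files': {key: yaml.safe_dump(value)
                          for key, value in hot_dict.get('files', {}).items()}
            }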
|
||||
|
||||
|
||||
def get_vnfd(vnfd_id, csar_dir):
|
||||
vnfd = vnfd_utils.Vnfd(vnfd_id)
|
||||
vnfd.init_from_csar_dir(csar_dir)
|
||||
return vnfd
|
||||
|
||||
|
||||
def init_nfv_dict(hot_template):
|
||||
get_params = []
|
||||
|
||||
def _get_get_param(prop):
|
||||
if isinstance(prop, dict):
|
||||
for key, value in prop.items():
|
||||
if key == 'get_param':
|
||||
get_params.append(value)
|
||||
else:
|
||||
_get_get_param(value)
|
||||
elif isinstance(prop, list):
|
||||
for value in prop:
|
||||
_get_get_param(value)
|
||||
|
||||
for res in hot_template.get('resources', {}).values():
|
||||
_get_get_param(res.get('properties', {}))
|
||||
|
||||
nfv = {}
|
||||
|
||||
for param in get_params:
|
||||
if (not isinstance(param, list) or len(param) < 4 or
|
||||
param[0] != 'nfv'):
|
||||
continue
|
||||
parent = nfv
|
||||
for item in param[1:-1]:
|
||||
parent.setdefault(item, {})
|
||||
parent = parent[item]
|
||||
parent[param[-1]] = None
|
||||
|
||||
# TODO(oda-g): enhance to handle list
|
||||
# NOTE: Lists are not handled here; only 'fixed_ips' is treated as
# a list in userdata_default.py at the moment.
# Note that if list handling is enhanced, userdata_default.py needs
# to be modified accordingly.
|
||||
return nfv
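init_nfv_dict walks every get_param reference of the form ['nfv', ...] in the base HOT and builds a nested skeleton whose leaves are None, to be filled in afterwards. A small illustration (the template fragment is made up for the example):

    top_hot = {
        'resources': {
            'VDU1': {
                'type': 'OS::Nova::Server',
                'properties': {
                    'flavor': {'get_param': ['nfv', 'VDU', 'VDU1',
                                             'computeFlavourId']},
                    'image': {'get_param': ['nfv', 'VDU', 'VDU1', 'vcImageId']},
                }
            }
        }
    }
    init_nfv_dict(top_hot)
    # -> {'VDU': {'VDU1': {'computeFlavourId': None, 'vcImageId': None}}}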
|
||||
|
||||
|
||||
def get_param_flavor(vdu_name, req, vnfd, grant):
|
||||
# try to get from grant
|
||||
if 'vimAssets' in grant:
|
||||
assets = grant['vimAssets']
|
||||
if 'computeResourceFlavours' in assets:
|
||||
flavours = assets['computeResourceFlavours']
|
||||
for flavour in flavours:
|
||||
if flavour['vnfdVirtualComputeDescId'] == vdu_name:
|
||||
return flavour['vimFlavourId']
|
||||
|
||||
# if specified in VNFD, use it
|
||||
# NOTE: if not found, the parameter is set to None,
# which may cause an error at stack creation.
|
||||
return vnfd.get_compute_flavor(req['flavourId'], vdu_name)
|
||||
|
||||
|
||||
def get_param_image(vdu_name, req, vnfd, grant):
|
||||
# try to get from grant
|
||||
if 'vimAssets' in grant:
|
||||
assets = grant['vimAssets']
|
||||
if 'softwareImages' in assets:
|
||||
images = assets['softwareImages']
|
||||
for image in images:
|
||||
if image['vnfdSoftwareImageId'] == vdu_name:
|
||||
return image['vimSoftwareImageId']
|
||||
|
||||
# if specified in VNFD, use it
|
||||
# NOTE: if not found, the parameter is set to None,
# which may cause an error at stack creation.
|
||||
sw_images = vnfd.get_sw_image(req['flavourId'])
|
||||
for name, image in sw_images.items():
|
||||
if name == vdu_name:
|
||||
return image
|
||||
|
||||
|
||||
def get_param_zone(vdu_name, grant_req, grant):
|
||||
if 'zones' not in grant or 'addResources' not in grant:
|
||||
return
|
||||
|
||||
for res in grant['addResources']:
|
||||
if 'zoneId' not in res:
|
||||
continue
|
||||
for req_res in grant_req['addResources']:
|
||||
if req_res['id'] == res['resourceDefinitionId']:
|
||||
if req_res.get('resourceTemplateId') == vdu_name:
|
||||
for zone in grant['zones']:
|
||||
if zone['id'] == res['zoneId']: # must be found
|
||||
return zone['zoneId']
|
||||
|
||||
|
||||
def _get_fixed_ips_from_extcp(extcp):
|
||||
fixed_ips = []
|
||||
for cp_conf in extcp['cpConfig'].values():
|
||||
if 'cpProtocolData' not in cp_conf:
|
||||
continue
|
||||
for prot_data in cp_conf['cpProtocolData']:
|
||||
if 'ipOverEthernet' not in prot_data:
|
||||
continue
|
||||
if 'ipAddresses' not in prot_data['ipOverEthernet']:
|
||||
continue
|
||||
for ip in prot_data['ipOverEthernet']['ipAddresses']:
|
||||
data = {}
|
||||
if 'fixedAddresses' in ip:
|
||||
# pick up only one ip address
|
||||
data['ip_address'] = str(ip['fixedAddresses'][0])
|
||||
if 'subnetId' in ip:
|
||||
data['subnet'] = ip['subnetId']
|
||||
if data:
|
||||
fixed_ips.append(data)
|
||||
return fixed_ips
|
||||
|
||||
|
||||
def get_param_network(cp_name, grant, req):
|
||||
# see grant first then instantiateVnfRequest
|
||||
vls = grant.get('extVirtualLinks', []) + req.get('extVirtualLinks', [])
|
||||
for vl in vls:
|
||||
for extcp in vl['extCps']:
|
||||
if extcp['cpdId'] == cp_name:
|
||||
return vl['resourceId']
|
||||
|
||||
|
||||
def get_param_fixed_ips(cp_name, grant, req):
|
||||
# see grant first then instantiateVnfRequest
|
||||
vls = grant.get('extVirtualLinks', []) + req.get('extVirtualLinks', [])
|
||||
for vl in vls:
|
||||
for extcp in vl['extCps']:
|
||||
if extcp['cpdId'] == cp_name:
|
||||
return _get_fixed_ips_from_extcp(extcp)
|
||||
|
||||
|
||||
def apply_ext_managed_vls(hot_dict, req, grant):
|
||||
# see grant first then instantiateVnfRequest
|
||||
mgd_vls = (grant.get('extManagedVirtualLinks', []) +
|
||||
req.get('extManagedVirtualLinks', []))
|
||||
|
||||
# NOTE: refer to the HOT only here, not to the VNFD.
|
||||
# HOT and VNFD must be consistent.
|
||||
|
||||
for mgd_vl in mgd_vls:
|
||||
vl_name = mgd_vl['vnfVirtualLinkDescId']
|
||||
network_id = mgd_vl['resourceId']
|
||||
get_res = {'get_resource': vl_name}
|
||||
|
||||
def _change(item):
|
||||
if not isinstance(item, dict):
|
||||
return
|
||||
for key, value in item.items():
|
||||
if value == get_res:
|
||||
item[key] = network_id
|
||||
else:
|
||||
_change(value)
|
||||
|
||||
del_reses = []
|
||||
for res_name, res_data in hot_dict.get('resources', {}).items():
|
||||
# delete network definition
|
||||
if res_name == vl_name:
|
||||
del_reses.append(res_name)
|
||||
|
||||
# delete subnet definition
|
||||
if res_data['type'] == 'OS::Neutron::Subnet':
|
||||
net = (res_data.get('properties', {})
|
||||
.get('network', {})
|
||||
.get('get_resource'))
|
||||
if net == vl_name:
|
||||
del_reses.append(res_name)
|
||||
|
||||
# change '{get_resource: vl_name}' to network_id
|
||||
_change(res_data)
|
||||
|
||||
for res_name in del_reses:
|
||||
hot_dict['resources'].pop(res_name)
|
67
tacker/sol_refactored/mgmt_drivers/sample_script.py
Normal file
@ -0,0 +1,67 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
|
||||
class SampleScript(object):
|
||||
|
||||
def __init__(self, req, inst, grant_req, grant, csar_dir):
|
||||
self.req = req
|
||||
self.inst = inst
|
||||
self.grant_req = grant_req
|
||||
self.grant = grant
|
||||
self.csar_dir = csar_dir
|
||||
|
||||
def instantiate_start(self):
|
||||
pass
|
||||
|
||||
def instantiate_end(self):
|
||||
pass
|
||||
|
||||
def terminate_start(self):
|
||||
pass
|
||||
|
||||
def terminate_end(self):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
script_dict = pickle.load(sys.stdin.buffer)
|
||||
|
||||
operation = script_dict['operation']
|
||||
req = script_dict['request']
|
||||
inst = script_dict['vnf_instance']
|
||||
grant_req = script_dict['grant_request']
|
||||
grant = script_dict['grant_response']
|
||||
csar_dir = script_dict['tmp_csar_dir']
|
||||
|
||||
script = SampleScript(req, inst, grant_req, grant, csar_dir)
|
||||
try:
|
||||
getattr(script, operation)()
|
||||
except AttributeError:
|
||||
raise Exception("{} is not included in the script.".format(operation))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
os._exit(0)
|
||||
except Exception as ex:
|
||||
sys.stderr.write(str(ex))
|
||||
sys.stderr.flush()
|
||||
os._exit(1)
|
58
tacker/sol_refactored/nfvo/glance_utils.py
Normal file
@ -0,0 +1,58 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from openstack import connection
|
||||
from openstack import exceptions as os_ex
|
||||
from oslo_log import log as logging
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GlanceClient(object):
|
||||
|
||||
def __init__(self, vim_info):
|
||||
auth = dict(
|
||||
auth_url=vim_info.interfaceInfo['endpoint'],
|
||||
username=vim_info.accessInfo['username'],
|
||||
password=vim_info.accessInfo['password'],
|
||||
project_name=vim_info.accessInfo['project'],
|
||||
user_domain_name=vim_info.accessInfo['userDomain'],
|
||||
project_domain_name=vim_info.accessInfo['projectDomain']
|
||||
)
|
||||
self.conn = connection.Connection(
|
||||
region_name=vim_info.accessInfo.get('region'),
|
||||
auth=auth,
|
||||
identity_interface='internal')
|
||||
|
||||
def create_image(self, name, **fields):
|
||||
return self.conn.image.create_image(
|
||||
name, allow_duplicates=True, **fields)
|
||||
|
||||
def list_images(self, **params):
|
||||
return self.conn.image.images(**params)
|
||||
|
||||
def get_image(self, image_id):
|
||||
try:
|
||||
return self.conn.image.get_image(image_id)
|
||||
except os_ex.ResourceNotFound:
|
||||
LOG.debug("image %s not found.", image_id)
|
||||
|
||||
def delete_image(self, image_id):
|
||||
try:
|
||||
return self.conn.image.delete_image(image_id)
|
||||
except os_ex.ResourceNotFound:
|
||||
LOG.debug("image %s not found.", image_id)
|
305
tacker/sol_refactored/nfvo/local_nfvo.py
Normal file
@ -0,0 +1,305 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import os
|
||||
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
from tacker.common import utils as common_utils
|
||||
import tacker.conf
|
||||
from tacker.objects import fields
|
||||
from tacker.objects import vnf_package
|
||||
from tacker.objects import vnf_package_vnfd
|
||||
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import exceptions as sol_ex
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import vim_utils
|
||||
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
|
||||
from tacker.sol_refactored.common import vnfd_utils
|
||||
from tacker.sol_refactored.nfvo import glance_utils
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.sol_refactored.objects.v2 import fields as v2_fields
|
||||
|
||||
|
||||
CONF = tacker.conf.CONF
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
# NOTE:
|
||||
# This is the NFVO implementation used when an external NFVO is not used.
# It implements only the functions necessary for vnflcm v2.
# It uses the original tacker vnfpkgm v1 for vnf package management
# and adds grant functions.
|
||||
|
||||
|
||||
class LocalNfvo(object):
|
||||
|
||||
def __init__(self):
|
||||
self.inst_vim_info = {}
|
||||
|
||||
def onboarded_show(self, context, id):
|
||||
pkg_vnfd = vnf_package_vnfd.VnfPackageVnfd().get_by_vnfdId(
|
||||
context, id)
|
||||
if not pkg_vnfd:
|
||||
raise sol_ex.VnfdIdNotFound(vnfd_id=id)
|
||||
|
||||
vnf_pkg = vnf_package.VnfPackage().get_by_id(
|
||||
context, pkg_vnfd.package_uuid)
|
||||
if not vnf_pkg:
|
||||
# should never happen; just for code consistency.
|
||||
raise sol_ex.VnfdIdNotFound(vnfd_id=id)
|
||||
|
||||
if (vnf_pkg.onboarding_state !=
|
||||
fields.PackageOnboardingStateType.ONBOARDED):
|
||||
# NOTE: the API itself should return 409, but this is used only by
# vnf_instance create and will be converted to 422 there.
|
||||
raise sol_ex.VnfdIdNotFound(vnfd_id=id)
|
||||
|
||||
# NOTE:
|
||||
# This is used for vnf_instance create at the moment.
# Only the attributes necessary for vnf_instance create are set.
|
||||
res = objects.VnfPkgInfoV2(
|
||||
id=pkg_vnfd.package_uuid,
|
||||
vnfdId=pkg_vnfd.vnfd_id,
|
||||
vnfProvider=pkg_vnfd.vnf_provider,
|
||||
vnfProductName=pkg_vnfd.vnf_product_name,
|
||||
vnfSoftwareVersion=pkg_vnfd.vnf_software_version,
|
||||
vnfdVersion=pkg_vnfd.vnfd_version,
|
||||
operationalState=vnf_pkg.operational_state
|
||||
)
|
||||
|
||||
return res
|
||||
|
||||
def get_csar_dir(self, context, vnfd_id):
|
||||
pkg_vnfd = vnf_package_vnfd.VnfPackageVnfd().get_by_vnfdId(
|
||||
context, vnfd_id)
|
||||
if not pkg_vnfd:
|
||||
raise sol_ex.VnfdIdNotFound(vnfd_id=vnfd_id)
|
||||
|
||||
csar_dir = os.path.join(CONF.vnf_package.vnf_package_csar_path,
|
||||
pkg_vnfd.package_uuid)
|
||||
if not os.path.isdir(csar_dir):
|
||||
raise sol_ex.VnfdIdNotFound(vnfd_id=vnfd_id)
|
||||
|
||||
return csar_dir
|
||||
|
||||
def get_vnfd(self, context, vnfd_id):
|
||||
vnfd = vnfd_utils.Vnfd(vnfd_id)
|
||||
vnfd.init_from_csar_dir(self.get_csar_dir(context, vnfd_id))
|
||||
return vnfd
|
||||
|
||||
def _glance_create_image(self, vim_info, vnfd, sw_data, inst_id):
|
||||
min_disk = 0
|
||||
if 'min_disk' in sw_data:
|
||||
min_disk = common_utils.MemoryUnit.convert_unit_size_to_num(
|
||||
sw_data['min_disk'], 'GB')
|
||||
|
||||
min_ram = 0
|
||||
if 'min_ram' in sw_data:
|
||||
min_ram = common_utils.MemoryUnit.convert_unit_size_to_num(
|
||||
sw_data['min_ram'], 'MB')
|
||||
|
||||
filename = os.path.join(vnfd.csar_dir,
|
||||
sw_data['file'].split('../')[-1])
|
||||
|
||||
# NOTE: use a tag to find and delete the images when the vnf instance is terminated.
|
||||
create_args = {
|
||||
'min_disk': min_disk,
|
||||
'min_ram': min_ram,
|
||||
'disk_format': sw_data.get('disk_format'),
|
||||
'container_format': sw_data.get('container_format'),
|
||||
'filename': filename,
|
||||
'visibility': 'private',
|
||||
'tags': [inst_id]
|
||||
}
|
||||
|
||||
glance_client = glance_utils.GlanceClient(vim_info)
|
||||
image = glance_client.create_image(sw_data['name'], **create_args)
|
||||
|
||||
LOG.debug("image created name: %s id: %s", sw_data['name'], image.id)
|
||||
|
||||
return image.id
|
||||
|
||||
def _get_vim_info(self, context, grant_req):
|
||||
lcmocc = lcmocc_utils.get_lcmocc(context, grant_req.vnfLcmOpOccId)
|
||||
inst_req = lcmocc.operationParams
|
||||
if inst_req.obj_attr_is_set('vimConnectionInfo'):
|
||||
return inst_utils.select_vim_info(inst_req.vimConnectionInfo)
|
||||
|
||||
# NOTE: exception is not raised.
|
||||
return vim_utils.get_default_vim(context)
|
||||
|
||||
def instantiate_grant(self, context, grant_req, grant_res):
|
||||
# handle ZoneInfo
|
||||
zone_list = config.CONF.v2_nfvo.test_grant_zone_list
|
||||
zone_id = None
|
||||
if len(zone_list) > 0:
|
||||
zone_infos = []
|
||||
for zone in zone_list:
|
||||
zone_info = objects.ZoneInfoV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
zoneId=zone
|
||||
)
|
||||
zone_infos.append(zone_info)
|
||||
grant_res.zones = zone_infos
|
||||
zone_id = zone_infos[0].id
|
||||
|
||||
# handle addResources.
|
||||
# only copy req to res. i.e. grant all.
|
||||
attr = 'addResources'
|
||||
if grant_req.obj_attr_is_set(attr):
|
||||
add_res = []
|
||||
for res_def in grant_req[attr]:
|
||||
g_info = objects.GrantInfoV1(
|
||||
resourceDefinitionId=res_def.id
|
||||
)
|
||||
if zone_id is not None and res_def.type == 'COMPUTE':
|
||||
g_info.zoneId = zone_id
|
||||
add_res.append(g_info)
|
||||
grant_res[attr] = add_res
|
||||
|
||||
# handle vimAssets
|
||||
# if there is an artifact, create glance image.
|
||||
vnfd = self.get_vnfd(context, grant_req.vnfdId)
|
||||
sw_image_data = vnfd.get_sw_image_data(grant_req.flavourId)
|
||||
vim_sw_images = []
|
||||
for res_id, sw_data in sw_image_data.items():
|
||||
if 'file' in sw_data:
|
||||
# if an artifact is specified, create a glance image.
# it may fail; catch the exception and raise 403 (not granted)
# if an error occurs.
|
||||
|
||||
# get vim_info to access glance
|
||||
vim_info = self._get_vim_info(context, grant_req)
|
||||
if vim_info is None:
|
||||
msg = "No vimConnectionInfo to create glance image"
|
||||
raise sol_ex.LocalNfvoGrantFailed(sol_detail=msg)
|
||||
|
||||
try:
|
||||
image = self._glance_create_image(vim_info, vnfd, sw_data,
|
||||
grant_req.vnfInstanceId)
|
||||
except Exception:
|
||||
msg = "glance image create failed"
|
||||
LOG.exception(msg)
|
||||
raise sol_ex.LocalNfvoGrantFailed(sol_detail=msg)
|
||||
else:
|
||||
# there is no artifact; assume the image is already created.
|
||||
image = sw_data['name']
|
||||
vim_sw_image = objects.VimSoftwareImageV1(
|
||||
vnfdSoftwareImageId=res_id,
|
||||
vimSoftwareImageId=image)
|
||||
vim_sw_images.append(vim_sw_image)
|
||||
if vim_sw_images:
|
||||
grant_res.vimAssets = objects.GrantV1_VimAssets(
|
||||
softwareImages=vim_sw_images
|
||||
)
|
||||
|
||||
def grant(self, context, grant_req):
|
||||
grant_res = objects.GrantV1(
|
||||
id=uuidutils.generate_uuid(),
|
||||
vnfInstanceId=grant_req.vnfInstanceId,
|
||||
vnfLcmOpOccId=grant_req.vnfLcmOpOccId
|
||||
)
|
||||
|
||||
# NOTE: only instantiate is considered at the moment.
|
||||
# terminate is granted with no grant_res constructed.
|
||||
if grant_req.operation == v2_fields.LcmOperationType.INSTANTIATE:
|
||||
self.instantiate_grant(context, grant_req, grant_res)
|
||||
|
||||
endpoint = config.CONF.v2_vnfm.endpoint
|
||||
grant_res._links = objects.GrantV1_Links(
|
||||
vnfLcmOpOcc=objects.Link(
|
||||
href=lcmocc_utils.lcmocc_href(grant_req.vnfLcmOpOccId,
|
||||
endpoint)),
|
||||
vnfInstance=objects.Link(
|
||||
href=inst_utils.inst_href(grant_req.vnfInstanceId,
|
||||
endpoint))
|
||||
)
|
||||
grant_res._links.self = objects.Link(
|
||||
href="{}/grant/v1/grants/{}".format(endpoint, grant_res.id))
|
||||
|
||||
return grant_res
|
||||
|
||||
def _update_vnf_pkg_usage_state(self, context, vnfd_id, state):
|
||||
pkg_vnfd = vnf_package_vnfd.VnfPackageVnfd().get_by_vnfdId(
|
||||
context, vnfd_id)
|
||||
if not pkg_vnfd:
|
||||
# should not happen. just for code consistency.
|
||||
LOG.error("VnfPackage of vnfdID %s not found.", vnfd_id)
|
||||
return
|
||||
|
||||
vnf_pkg = vnf_package.VnfPackage().get_by_id(
|
||||
context, pkg_vnfd.package_uuid)
|
||||
if not vnf_pkg:
|
||||
# should not happen. just for code consistency.
|
||||
LOG.error("VnfPackage %s not found.", pkg_vnfd.package_uuid)
|
||||
return
|
||||
|
||||
# prevent raising exception since this method is not a part of VNFM.
|
||||
try:
|
||||
vnf_pkg.usage_state = state
|
||||
vnf_pkg.save()
|
||||
except Exception as ex:
|
||||
LOG.error("Update vnfPackage %s to %s failed: %s",
|
||||
pkg_vnfd.package_uuid, state, ex)
|
||||
|
||||
def recv_inst_create_notification(self, context, inst):
|
||||
# update vnfPackage usageState to IN_USE
|
||||
self._update_vnf_pkg_usage_state(context, inst.vnfdId,
|
||||
fields.PackageUsageStateType.IN_USE)
|
||||
|
||||
def recv_inst_delete_notification(self, context, inst):
|
||||
# update vnfPackage usageState to NOT_IN_USE
|
||||
self._update_vnf_pkg_usage_state(context, inst.vnfdId,
|
||||
fields.PackageUsageStateType.NOT_IN_USE)
|
||||
|
||||
def _glance_delete_images(self, vim_info, inst_id):
|
||||
# prevent raising exception since this method is not a part of VNFM.
|
||||
try:
|
||||
glance_client = glance_utils.GlanceClient(vim_info)
|
||||
images = glance_client.list_images(tag=inst_id)
|
||||
except Exception:
|
||||
LOG.error("Get glance images for vnfInstance %s failed.", inst_id)
|
||||
return
|
||||
|
||||
for image in images:
|
||||
try:
|
||||
glance_client.delete_image(image.id)
|
||||
LOG.debug("image deleted name: %s id: %s",
|
||||
image.name, image.id)
|
||||
except Exception:
|
||||
LOG.error("image delete %s failed.", image.id)
|
||||
|
||||
def recv_lcmocc_notification(self, context, lcmocc, inst):
|
||||
if lcmocc.operation == v2_fields.LcmOperationType.TERMINATE:
|
||||
if (lcmocc.operationState ==
|
||||
v2_fields.LcmOperationStateType.PROCESSING):
|
||||
# register vim_info of vnf instance so that
|
||||
# it is used later to delete glance image.
|
||||
vim_info = inst_utils.select_vim_info(inst.vimConnectionInfo)
|
||||
self.inst_vim_info[inst.id] = vim_info
|
||||
elif (lcmocc.operationState ==
|
||||
v2_fields.LcmOperationStateType.FAILED_TEMP):
|
||||
self.inst_vim_info.pop(inst.id, None)
|
||||
elif (lcmocc.operationState ==
|
||||
v2_fields.LcmOperationStateType.COMPLETED):
|
||||
vim_info = self.inst_vim_info.pop(inst.id, None)
|
||||
if vim_info is None:
|
||||
# should never happen; just for code consistency.
|
||||
return
|
||||
self._glance_delete_images(vim_info, inst.id)
|
143
tacker/sol_refactored/nfvo/nfvo_client.py
Normal file
@ -0,0 +1,143 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
from oslo_log import log as logging
|
||||
|
||||
from tacker.sol_refactored.common import config
|
||||
from tacker.sol_refactored.common import http_client
|
||||
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
|
||||
from tacker.sol_refactored.common import subscription_utils as subsc_utils
|
||||
from tacker.sol_refactored.common import vnfd_utils
|
||||
from tacker.sol_refactored.nfvo import local_nfvo
|
||||
from tacker.sol_refactored import objects
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
CONF = config.CONF
|
||||
|
||||
|
||||
class NfvoClient(object):
|
||||
|
||||
def __init__(self):
|
||||
self.is_local = True
|
||||
self.nfvo = local_nfvo.LocalNfvo()
|
||||
|
||||
if CONF.v2_nfvo.use_external_nfvo:
|
||||
self.is_local = False
|
||||
auth_handle = http_client.OAuth2AuthHandle(
|
||||
CONF.v2_nfvo.endpoint,
|
||||
CONF.v2_nfvo.token_endpoint,
|
||||
CONF.v2_nfvo.client_id,
|
||||
CONF.v2_nfvo.client_password)
|
||||
self.client = http_client.HttpClient(auth_handle)
|
||||
self.grant_api_version = CONF.v2_nfvo.grant_api_version
|
||||
self.vnfpkgm_api_version = CONF.v2_nfvo.vnfpkgm_api_version
|
||||
|
||||
def get_vnf_package_info_vnfd(self, context, vnfd_id):
|
||||
if self.is_local:
|
||||
return self.nfvo.onboarded_show(context, vnfd_id)
|
||||
|
||||
url = "/vnfpkgm/v2/onboarded_vnf_packages/{}".format(vnfd_id)
|
||||
resp, body = self.client.do_request(
|
||||
url, "GET", expected_status=[200],
|
||||
version=self.vnfpkgm_api_version)
|
||||
LOG.debug("vnfpkg_info_vnfd: %s" % body)
|
||||
return objects.VnfPkgInfoV2.from_dict(body)
|
||||
|
||||
def onboarded_show_vnfd(self, context, vnfd_id):
|
||||
if self.is_local:
|
||||
# this should not happen; it will result in an internal server error.
|
||||
LOG.error("onboarded_show_vnfd is called.")
|
||||
return
|
||||
|
||||
url = "/vnfpkgm/v2/onboarded_vnf_packages/{}/vnfd".format(vnfd_id)
|
||||
resp, body = self.client.do_request(
|
||||
url, "GET", expected_status=[200],
|
||||
version=self.vnfpkgm_api_version)
|
||||
return body
|
||||
|
||||
def onboarded_package_content(self, context, vnfd_id):
|
||||
if self.is_local:
|
||||
# this should not happen; it will result in an internal server error.
|
||||
LOG.error("onboarded_package_content is called.")
|
||||
return
|
||||
|
||||
url = "/vnfpkgm/v2/onboarded_vnf_packages/{}/package_content"
|
||||
url = url.format(vnfd_id)
|
||||
resp, body = self.client.do_request(
|
||||
url, "GET", expected_status=[200],
|
||||
version=self.vnfpkgm_api_version)
|
||||
return body
|
||||
|
||||
def grant(self, context, grant_req):
|
||||
LOG.debug("grant request: %s", grant_req.to_dict())
|
||||
|
||||
if self.is_local:
|
||||
grant_res = self.nfvo.grant(context, grant_req)
|
||||
else:
|
||||
url = "/grant/v2/grants"
|
||||
resp, body = self.client.do_request(
|
||||
url, "POST", expected_status=[201], body=grant_req,
|
||||
version=self.grant_api_version)
|
||||
grant_res = objects.GrantV1.from_dict(body)
|
||||
|
||||
LOG.debug("grant response: %s", grant_res.to_dict())
|
||||
return grant_res
|
||||
|
||||
def get_vnfd(self, context, vnfd_id, all_contents=False):
|
||||
if self.is_local:
|
||||
return self.nfvo.get_vnfd(context, vnfd_id)
|
||||
|
||||
if all_contents:
|
||||
zip_file = self.onboarded_package_content(context, vnfd_id)
|
||||
else:
|
||||
zip_file = self.onboarded_show_vnfd(context, vnfd_id)
|
||||
|
||||
vnfd = vnfd_utils.Vnfd(vnfd_id)
|
||||
vnfd.init_from_zip_file(zip_file)
|
||||
|
||||
return vnfd
|
||||
|
||||
def send_inst_create_notification(self, context, inst, endpoint):
|
||||
subscs = subsc_utils.get_inst_create_subscs(context, inst)
|
||||
for subsc in subscs:
|
||||
notif_data = subsc_utils.make_create_inst_notif_data(
|
||||
subsc, inst, endpoint)
|
||||
subsc_utils.send_notification(subsc, notif_data)
|
||||
|
||||
if self.is_local:
|
||||
self.nfvo.recv_inst_create_notification(context, inst)
|
||||
|
||||
def send_inst_delete_notification(self, context, inst, endpoint):
|
||||
subscs = subsc_utils.get_inst_delete_subscs(context, inst)
|
||||
for subsc in subscs:
|
||||
notif_data = subsc_utils.make_delete_inst_notif_data(
|
||||
subsc, inst, endpoint)
|
||||
subsc_utils.send_notification(subsc, notif_data)
|
||||
|
||||
if self.is_local:
|
||||
self.nfvo.recv_inst_delete_notification(context, inst)
|
||||
|
||||
def send_lcmocc_notification(self, context, lcmocc, inst, endpoint):
|
||||
subscs = subsc_utils.get_lcmocc_subscs(context, lcmocc, inst)
|
||||
for subsc in subscs:
|
||||
notif_data = lcmocc_utils.make_lcmocc_notif_data(
|
||||
subsc, lcmocc, endpoint)
|
||||
subsc_utils.send_notification(subsc, notif_data)
|
||||
|
||||
if self.is_local:
|
||||
self.nfvo.recv_lcmocc_notification(context, lcmocc, inst)
|
125
tacker/sol_refactored/objects/__init__.py
Normal file
@ -0,0 +1,125 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from tacker.sol_refactored.db import api as db_api
|
||||
from tacker.sol_refactored.objects.common import fields # noqa
|
||||
|
||||
# NOTE: You may scratch your head as you see code that imports
|
||||
# this module and then accesses attributes for objects such as Instance,
|
||||
# etc, yet you do not see these attributes in here. Never fear, there is
|
||||
# a little bit of magic. When objects are registered, an attribute is set
|
||||
# on this module automatically, pointing to the newest/latest version of
|
||||
# the object.
|
||||
|
||||
|
||||
def register_all(init_db=True):
|
||||
# NOTE: You must make sure your object gets imported in this
|
||||
# function in order for it to be registered by services that may
|
||||
# need to receive it via RPC.
|
||||
objects_root = 'tacker.sol_refactored.objects'
|
||||
__import__(objects_root + '.common.cp_protocol_data')
|
||||
__import__(objects_root + '.common.ext_link_port_data')
|
||||
__import__(objects_root + '.common.ext_managed_virtual_link_data')
|
||||
__import__(objects_root + '.common.ext_virtual_link_data')
|
||||
__import__(objects_root + '.common.ip_over_ethernet_address_data')
|
||||
__import__(objects_root + '.common.link')
|
||||
__import__(objects_root + '.common.notification_link')
|
||||
__import__(objects_root + '.common.problem_details')
|
||||
__import__(objects_root + '.common.resource_handle')
|
||||
__import__(objects_root + '.common.subscription_authentication')
|
||||
__import__(objects_root + '.common.vim_connection_info')
|
||||
__import__(objects_root + '.common.vnf_ext_cp_config')
|
||||
__import__(objects_root + '.common.vnf_ext_cp_data')
|
||||
__import__(objects_root + '.common.vnf_instance_subscription_filter')
|
||||
__import__(objects_root + '.v1.constraint_resource_ref')
|
||||
__import__(objects_root + '.v1.fields')
|
||||
__import__(objects_root + '.v1.grant_info')
|
||||
__import__(objects_root + '.v1.grant')
|
||||
__import__(objects_root + '.v1.grant_request')
|
||||
__import__(objects_root + '.v1.placement_constraint')
|
||||
__import__(objects_root + '.v1.resource_definition')
|
||||
__import__(objects_root + '.v1.snapshot_resource_definition')
|
||||
__import__(objects_root + '.v1.vim_compute_resource_flavour')
|
||||
__import__(objects_root + '.v1.vim_constraint')
|
||||
__import__(objects_root + '.v1.vim_snapshot_resource')
|
||||
__import__(objects_root + '.v1.vim_software_image')
|
||||
__import__(objects_root + '.v1.zone_group_info')
|
||||
__import__(objects_root + '.v1.zone_info')
|
||||
__import__(objects_root + '.v2.affected_ext_link_port')
|
||||
__import__(objects_root + '.v2.affected_virtual_link')
|
||||
__import__(objects_root + '.v2.affected_virtual_storage')
|
||||
__import__(objects_root + '.v2.affected_vnfc')
|
||||
__import__(objects_root + '.v2.cancel_mode')
|
||||
__import__(objects_root + '.v2.change_current_vnf_pkg_request')
|
||||
__import__(objects_root + '.v2.change_ext_vnf_connectivity_request')
|
||||
__import__(objects_root + '.v2.change_vnf_flavour_request')
|
||||
__import__(objects_root + '.v2.cp_protocol_info')
|
||||
__import__(objects_root + '.v2.create_vnf_pkg_info_request')
|
||||
__import__(objects_root + '.v2.create_vnf_request')
|
||||
__import__(objects_root + '.v2.create_vnf_snapshot_info_request')
|
||||
__import__(objects_root + '.v2.create_vnf_snapshot_request')
|
||||
__import__(objects_root + '.v2.external_artifacts_access_config')
|
||||
__import__(objects_root + '.v2.ext_link_port_info')
|
||||
__import__(objects_root + '.v2.ext_managed_virtual_link_info')
|
||||
__import__(objects_root + '.v2.ext_virtual_link_info')
|
||||
__import__(objects_root + '.v2.fields')
|
||||
__import__(objects_root + '.v2.heal_vnf_request')
|
||||
__import__(objects_root + '.v2.instantiate_vnf_request')
|
||||
__import__(objects_root + '.v2.ip_over_ethernet_address_info')
|
||||
__import__(objects_root + '.v2.lccn_links')
|
||||
__import__(objects_root + '.v2.lccn_subscription')
|
||||
__import__(objects_root + '.v2.lccn_subscription_request')
|
||||
__import__(objects_root + '.v2.lifecycle_change_notifications_filter')
|
||||
__import__(objects_root + '.v2.modifications_triggered_by_vnf_pkg_change')
|
||||
__import__(objects_root + '.v2.monitoring_parameter')
|
||||
__import__(objects_root + '.v2.operate_vnf_request')
|
||||
__import__(objects_root + '.v2.pkgm_links')
|
||||
__import__(objects_root + '.v2.pkgm_notification_filter')
|
||||
__import__(objects_root + '.v2.pkgm_subscription_request')
|
||||
__import__(objects_root + '.v2.revert_to_vnf_snapshot_request')
|
||||
__import__(objects_root + '.v2.scale_info')
|
||||
__import__(objects_root + '.v2.scale_vnf_request')
|
||||
__import__(objects_root + '.v2.scale_vnf_to_level_request')
|
||||
__import__(objects_root + '.v2.terminate_vnf_request')
|
||||
__import__(objects_root + '.v2.upload_vnf_package_from_uri_request')
|
||||
__import__(objects_root + '.v2.virtual_storage_resource_info')
|
||||
__import__(objects_root + '.v2.vnfc_info')
|
||||
__import__(objects_root + '.v2.vnfc_resource_info')
|
||||
__import__(objects_root + '.v2.vnfc_snapshot_info')
|
||||
__import__(objects_root + '.v2.vnf_ext_cp_info')
|
||||
__import__(objects_root + '.v2.vnf_identifier_creation_notification')
|
||||
__import__(objects_root + '.v2.vnf_identifier_deletion_notification')
|
||||
__import__(objects_root + '.v2.vnf_info_modification_request')
|
||||
__import__(objects_root + '.v2.vnf_info_modifications')
|
||||
__import__(objects_root + '.v2.vnf_instance')
|
||||
__import__(objects_root + '.v2.vnf_lcm_operation_occurrence_notification')
|
||||
__import__(objects_root + '.v2.vnf_lcm_op_occ')
|
||||
__import__(objects_root + '.v2.vnf_link_port_data')
|
||||
__import__(objects_root + '.v2.vnf_link_port_info')
|
||||
__import__(objects_root + '.v2.vnf_package_artifact_info')
|
||||
__import__(objects_root + '.v2.vnf_package_change_notification')
|
||||
__import__(objects_root + '.v2.vnf_package_onboarding_notification')
|
||||
__import__(objects_root + '.v2.vnf_package_software_image_info')
|
||||
__import__(objects_root + '.v2.vnf_pkg_info_modifications')
|
||||
__import__(objects_root + '.v2.vnf_pkg_info')
|
||||
__import__(objects_root + '.v2.vnf_snapshot_info_modification_request')
|
||||
__import__(objects_root + '.v2.vnf_snapshot_info_modifications')
|
||||
__import__(objects_root + '.v2.vnf_snapshot_info')
|
||||
__import__(objects_root + '.v2.vnf_snapshot')
|
||||
__import__(objects_root + '.v2.vnf_state_snapshot_info')
|
||||
__import__(objects_root + '.v2.vnf_virtual_link_resource_info')
|
||||
|
||||
if init_db:
|
||||
db_api.configure()
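# Illustrative sketch (not part of this patch): once register_all() has run,
# each registered class is also reachable as an attribute of the objects
# package, as described in the module comment above. The class name used
# below is an example only.
#
#     from tacker.sol_refactored import objects
#
#     objects.register_all()
#     inst = objects.VnfInstanceV2(id='example-id')  # newest registered version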
|
tacker/sol_refactored/objects/base.py (new file, 440 lines)
@ -0,0 +1,440 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import collections
|
||||
import contextlib
|
||||
import datetime
|
||||
|
||||
from oslo_log import log as logging
|
||||
import oslo_messaging as messaging
|
||||
from oslo_serialization import jsonutils
|
||||
from oslo_utils import versionutils
|
||||
from oslo_versionedobjects import base as ovoo_base
|
||||
from oslo_versionedobjects import exception as ovoo_exc
|
||||
|
||||
from tacker.sol_refactored.db import api as db_api
|
||||
from tacker.sol_refactored.db.sqlalchemy import models
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.sol_refactored.objects import fields as obj_fields
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_attrname(name):
|
||||
"""Return the mangled name of the attribute's underlying storage."""
|
||||
return '_obj_' + name
|
||||
|
||||
|
||||
# Every TackerObject has its attributes named exactly the same as the
# corresponding ETSI NFV-SOL data type attributes. In almost all cases, the
# same applies to its one-to-one mapped model attributes and DB column names.
# However, some field names
|
||||
# cannot be used for model attributes (e.g., 'metadata'), in which case we
|
||||
# append a suffix '__' (double underscores) as a workaround.
|
||||
#
|
||||
# Note that TackerObject users need not be concerned about this.
|
||||
#
|
||||
# Example:
|
||||
# - NFV-SOL data type "VnfXXX"
|
||||
# - id
|
||||
# - name
|
||||
# - metadata
|
||||
# - class VnfXXX(base.TackerPersistentObject) attributes
|
||||
# - id
|
||||
# - name
|
||||
# - metadata
|
||||
# - class VnfXXX(model_base.BASE) attributes
|
||||
# - id
|
||||
# - name
|
||||
# - metadata__ <-- 'metadata' is one of reserved names and cannot be used.
|
||||
# - DB column names:
|
||||
# - id
|
||||
# - name
|
||||
# - metadata
|
||||
RESERVED_FIELD_NAMES = [
|
||||
'metadata',
|
||||
]
|
||||
|
||||
|
||||
def get_model_field(name):
|
||||
if name in RESERVED_FIELD_NAMES:
|
||||
return name + '__'
|
||||
return name
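# Illustrative example (not part of this patch): with the reserved-name
# handling above, a field called 'metadata' maps to the 'metadata__' model
# attribute, while ordinary field names pass through unchanged.
#
#     assert get_model_field('metadata') == 'metadata__'
#     assert get_model_field('name') == 'name'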
|
||||
|
||||
|
||||
class TackerObjectRegistry(ovoo_base.VersionedObjectRegistry):
|
||||
notification_classes = []
|
||||
_registry = None
|
||||
|
||||
# NOTE: Separate registry from original tacker objects.
|
||||
# When original tacker objects are gone, _registry and
|
||||
# __new__ can be removed.
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if not TackerObjectRegistry._registry:
|
||||
TackerObjectRegistry._registry = object.__new__(
|
||||
TackerObjectRegistry, *args, **kwargs)
|
||||
TackerObjectRegistry._registry._obj_classes = \
|
||||
collections.defaultdict(list)
|
||||
self = object.__new__(cls, *args, **kwargs)
|
||||
self._obj_classes = TackerObjectRegistry._registry._obj_classes
|
||||
return self
|
||||
|
||||
def registration_hook(self, cls, index):
|
||||
# NOTE: This is called when an object is registered,
|
||||
# and is responsible for maintaining tacker.objects.$OBJECT
|
||||
# as the highest-versioned implementation of a given object.
|
||||
version = versionutils.convert_version_to_tuple(cls.VERSION)
|
||||
if not hasattr(objects, cls.obj_name()):
|
||||
setattr(objects, cls.obj_name(), cls)
|
||||
else:
|
||||
cur_version = versionutils.convert_version_to_tuple(
|
||||
getattr(objects, cls.obj_name()).VERSION)
|
||||
if version >= cur_version:
|
||||
setattr(objects, cls.obj_name(), cls)
|
||||
|
||||
|
||||
class TackerObject(ovoo_base.VersionedObject):
|
||||
# These should be renamed (s/_sol_refactored//) when the older
|
||||
# implementation ceases to exist.
|
||||
OBJ_SERIAL_NAMESPACE = 'tacker_sol_refactored_object'
|
||||
OBJ_PROJECT_NAMESPACE = 'tacker_sol_refactored'
|
||||
|
||||
def __init__(self, context=None, **kwargs):
|
||||
super(TackerObject, self).__init__(context, **kwargs)
|
||||
self.obj_set_defaults()
|
||||
|
||||
def tacker_obj_get_changes(self):
|
||||
"""Returns a dict of changed fields with tz unaware datetimes.
|
||||
|
||||
Any timezone aware datetime field will be converted to UTC timezone
|
||||
and returned as timezone unaware datetime.
|
||||
|
||||
This will allow us to pass these fields directly to a db update
|
||||
method as they can't have timezone information.
|
||||
"""
|
||||
# Get dirtied/changed fields
|
||||
changes = self.obj_get_changes()
|
||||
|
||||
# Look for datetime objects that contain timezone information
|
||||
for k, v in changes.items():
|
||||
if isinstance(v, datetime.datetime) and v.tzinfo:
|
||||
# Remove timezone information and adjust the time according to
|
||||
# the timezone information's offset.
|
||||
changes[k] = v.replace(tzinfo=None) - v.utcoffset()
|
||||
|
||||
# Return modified dict
|
||||
return changes
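# Illustrative example (not part of this patch): a timezone-aware change such
# as 2021-01-01T09:00:00+09:00 is returned by this method as the naive UTC
# value 2021-01-01T00:00:00, which can be handed to the DB layer directly.
#
#     tz = datetime.timezone(datetime.timedelta(hours=9))
#     v = datetime.datetime(2021, 1, 1, 9, 0, tzinfo=tz)
#     v.replace(tzinfo=None) - v.utcoffset()  # -> datetime.datetime(2021, 1, 1, 0, 0)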
|
||||
|
||||
def obj_reset_changes(self, fields=None, recursive=False):
|
||||
"""Reset the list of fields that have been changed.
|
||||
|
||||
.. note::
|
||||
|
||||
- This is NOT "revert to previous values"
|
||||
- Specifying fields on recursive resets will only be honored at the
|
||||
top level. Everything below the top will reset all.
|
||||
|
||||
:param fields: List of fields to reset, or "all" if None.
|
||||
:param recursive: Call obj_reset_changes(recursive=True) on
|
||||
any sub-objects within the list of fields
|
||||
being reset.
|
||||
"""
|
||||
if recursive:
|
||||
for field in self.obj_get_changes():
|
||||
|
||||
# Ignore fields not in requested set (if applicable)
|
||||
if fields and field not in fields:
|
||||
continue
|
||||
|
||||
# Skip any fields that are unset
|
||||
if not self.obj_attr_is_set(field):
|
||||
continue
|
||||
|
||||
value = getattr(self, field)
|
||||
|
||||
# Don't reset nulled fields
|
||||
if value is None:
|
||||
continue
|
||||
|
||||
# Reset straight Object and ListOfObjects fields
|
||||
if isinstance(self.fields[field], obj_fields.ObjectField):
|
||||
value.obj_reset_changes(recursive=True)
|
||||
elif isinstance(self.fields[field],
|
||||
obj_fields.ListOfObjectsField):
|
||||
for thing in value:
|
||||
thing.obj_reset_changes(recursive=True)
|
||||
elif isinstance(self.fields[field],
|
||||
obj_fields.DictOfObjectsField):
|
||||
for key, thing in value.items():
|
||||
thing.obj_reset_changes(recursive=True)
|
||||
|
||||
if fields:
|
||||
self._changed_fields -= set(fields)
|
||||
else:
|
||||
self._changed_fields.clear()
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, dict_obj):
|
||||
inst = cls()
|
||||
for name, field in cls.fields.items():
|
||||
value = dict_obj.get(name, None)
|
||||
if value is None:
|
||||
continue
|
||||
if isinstance(field, obj_fields.ObjectField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, cls.VERSION)
|
||||
setattr(inst, name, child_cls.from_dict(value))
|
||||
elif isinstance(field, obj_fields.ListOfObjectsField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, cls.VERSION)
|
||||
list_of_objects = []
|
||||
for thing in value:
|
||||
list_of_objects.append(child_cls.from_dict(thing))
|
||||
setattr(inst, name, list_of_objects)
|
||||
elif isinstance(field, obj_fields.DictOfObjectsField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, cls.VERSION)
|
||||
dict_of_objects = {}
|
||||
for key, thing in value.items():
|
||||
dict_of_objects[key] = child_cls.from_dict(thing)
|
||||
setattr(inst, name, dict_of_objects)
|
||||
else:
|
||||
setattr(inst, name, field.from_primitive(inst, name, value))
|
||||
return inst
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_obj):
|
||||
return cls.from_dict(jsonutils.loads(json_obj))
|
||||
|
||||
def to_dict(self):
|
||||
obj = {}
|
||||
for name, field in self.fields.items():
|
||||
if not self.obj_attr_is_set(name):
|
||||
continue
|
||||
if isinstance(field, obj_fields.ObjectField):
|
||||
obj[name] = getattr(self, name).to_dict()
|
||||
elif isinstance(field, obj_fields.ListOfObjectsField):
|
||||
obj[name] = []
|
||||
for item in getattr(self, name):
|
||||
obj[name].append(item.to_dict())
|
||||
elif isinstance(field, obj_fields.DictOfObjectsField):
|
||||
obj[name] = {}
|
||||
for key, item in getattr(self, name).items():
|
||||
obj[name][key] = item.to_dict()
|
||||
else:
|
||||
obj[name] = field.to_primitive(self, name, getattr(self, name))
|
||||
return obj
|
||||
|
||||
def to_json(self):
|
||||
return jsonutils.dumps(self.to_dict())
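# Illustrative round trip (not part of this patch): to_json()/from_json() are
# symmetric for any registered object; 'SomeObject' below is a hypothetical
# TackerObject subclass.
#
#     json_str = obj.to_json()
#     copy = SomeObject.from_json(json_str)
#     assert copy.to_dict() == obj.to_dict()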
|
||||
|
||||
@contextlib.contextmanager
|
||||
def obj_alternate_context(self, context):
|
||||
original_context = self._context
|
||||
self._context = context
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
self._context = original_context
|
||||
|
||||
@classmethod
|
||||
def obj_class_from_name(cls, objname, objver):
|
||||
"""Returns a class from the registry based on a name and version."""
|
||||
if objname not in TackerObjectRegistry.obj_classes():
|
||||
LOG.error('Unable to instantiate unregistered object type '
          '%(objtype)s', {'objtype': objname})
|
||||
raise ovoo_exc.UnsupportedObjectError(objtype=objname)
|
||||
|
||||
# NOTE: only the newest version is registered by registration_hook.
|
||||
# omit version check.
|
||||
return TackerObjectRegistry.obj_classes()[objname][0]
|
||||
|
||||
|
||||
class TackerObjectSerializer(messaging.NoOpSerializer):
|
||||
"""A TackerObject-aware Serializer.
|
||||
|
||||
This implements the Oslo Serializer interface and provides the
|
||||
ability to serialize and deserialize TackerObject entities. Any service
|
||||
that needs to accept or return TackerObjects as arguments or result values
|
||||
should pass this to its RPCClient and RPCServer objects.
|
||||
"""
|
||||
|
||||
def _process_object(self, context, objprim):
|
||||
return TackerObject.obj_from_primitive(objprim, context=context)
|
||||
|
||||
def _process_iterable(self, context, action_fn, values):
|
||||
"""Process an iterable, taking an action on each value.
|
||||
|
||||
:param:context: Request context
|
||||
:param:action_fn: Action to take on each item in values
|
||||
:param:values: Iterable container of things to take action on
|
||||
:returns: A new container of the same type (except set) with
|
||||
items from values having had action applied.
|
||||
"""
|
||||
|
||||
iterable = values.__class__
|
||||
if issubclass(iterable, dict):
|
||||
return iterable(**{k: action_fn(context, v)
|
||||
for k, v in values.items()})
|
||||
|
||||
# NOTE: A set can't have an unhashable value inside,
|
||||
# such as a dict. Convert the set to list, which is fine, since we
|
||||
# can't send them over RPC anyway. We convert it to list as this
|
||||
# way there will be no semantic change between the fake rpc driver
|
||||
# used in functional test and a normal rpc driver.
|
||||
if iterable == set:
|
||||
iterable = list
|
||||
return iterable([action_fn(context, value) for value in values])
|
||||
|
||||
def serialize_entity(self, context, entity):
|
||||
if isinstance(entity, (tuple, list, set, dict)):
|
||||
entity = self._process_iterable(context, self.serialize_entity,
|
||||
entity)
|
||||
elif (hasattr(entity, 'obj_to_primitive') and
|
||||
callable(entity.obj_to_primitive)):
|
||||
entity = entity.obj_to_primitive()
|
||||
return entity
|
||||
|
||||
def deserialize_entity(self, context, entity):
|
||||
if (isinstance(entity, dict) and
|
||||
TackerObject._obj_primitive_key('name') in entity):
|
||||
entity = self._process_object(context, entity)
|
||||
elif isinstance(entity, (tuple, list, set, dict)):
|
||||
entity = self._process_iterable(context, self.deserialize_entity,
|
||||
entity)
|
||||
return entity
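# Illustrative wiring (not part of this patch): services exchanging these
# objects over RPC would typically pass this serializer when building their
# oslo.messaging client/server, e.g.
#
#     serializer = TackerObjectSerializer()
#     client = messaging.RPCClient(transport, target, serializer=serializer)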
|
||||
|
||||
|
||||
class TackerPersistentObject(TackerObject):
|
||||
"""Class for objects supposed to be to DB."""
|
||||
|
||||
def __init__(self, context=None, **kwargs):
|
||||
super(TackerPersistentObject, self).__init__(context, **kwargs)
|
||||
self._db_obj = None
|
||||
|
||||
# By default, it's assumed that there is a model class corresponding to one
|
||||
# TackerPersistentObject, which has the same named fields.
|
||||
def _get_model_cls(self):
|
||||
clsname = self.__class__.__name__
|
||||
return getattr(models, clsname)
|
||||
|
||||
@db_api.context_manager.writer
|
||||
def _save(self, context, merge=False):
|
||||
if not self.obj_get_changes():
|
||||
return
|
||||
model_cls = self._get_model_cls()
|
||||
inst = model_cls()
|
||||
inst.update(self.to_db_obj())
|
||||
# note: The same workaround is present in oslo.db ModelBase.save()
|
||||
# implementation.
|
||||
with context.session.begin(subtransactions=True):
|
||||
if merge:
|
||||
context.session.merge(inst, load=True)
|
||||
else:
|
||||
context.session.add(inst)
|
||||
context.session.flush()
|
||||
# 'flush' must have succeeded because we are here.
|
||||
self.obj_reset_changes()
|
||||
|
||||
@db_api.context_manager.writer
|
||||
def delete(self, context):
|
||||
if self._db_obj is None:
|
||||
return
|
||||
context.session.delete(self._db_obj)
|
||||
|
||||
# WARNING: Check if it is really necessary if you consider overriding this.
|
||||
def create(self, context):
|
||||
self._save(context)
|
||||
|
||||
# WARNING: Check if it is really necessary if you consider overriding this.
|
||||
def update(self, context):
|
||||
self._save(context, merge=True)
|
||||
|
||||
@classmethod
|
||||
@db_api.context_manager.reader
|
||||
def get_by_id(cls, context, id):
|
||||
model_cls = getattr(models, cls.__name__)
|
||||
query = context.session.query(model_cls).filter(model_cls.id == id)
|
||||
result = query.one_or_none()
|
||||
if result is None:
|
||||
return None
|
||||
return cls.from_db_obj(result)
|
||||
|
||||
@classmethod
|
||||
@db_api.context_manager.reader
|
||||
def get_all(cls, context):
|
||||
model_cls = getattr(models, cls.__name__)
|
||||
query = context.session.query(model_cls)
|
||||
result = query.all()
|
||||
return [cls.from_db_obj(item) for item in result]
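# Illustrative usage sketch (not part of this patch): a typical lifecycle of a
# persistent object built on the helpers above; 'VnfInstanceV2' is used as a
# hypothetical registered class name.
#
#     inst = objects.VnfInstanceV2(id=uuidutils.generate_uuid())
#     inst.create(context)                       # INSERT via _save()
#     loaded = objects.VnfInstanceV2.get_by_id(context, inst.id)
#     loaded.update(context)                     # UPDATE via _save(merge=True)
#     loaded.delete(context)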
|
||||
|
||||
@classmethod
|
||||
def from_db_obj(cls, db_obj):
|
||||
inst = cls()
|
||||
for name, field in cls.fields.items():
|
||||
name_ = get_model_field(name)
|
||||
value = db_obj.get(name_, None)
|
||||
if value is None:
|
||||
continue
|
||||
if isinstance(field, obj_fields.ObjectField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, None)
|
||||
setattr(inst, name, child_cls.from_json(value))
|
||||
elif isinstance(field, obj_fields.ListOfObjectsField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, None)
|
||||
list_of_objects = []
|
||||
value_loaded = jsonutils.loads(value)
|
||||
for thing in value_loaded:
|
||||
list_of_objects.append(child_cls.from_dict(thing))
|
||||
setattr(inst, name, list_of_objects)
|
||||
elif isinstance(field, obj_fields.DictOfObjectsField):
|
||||
child_cls = cls.obj_class_from_name(field.objname, None)
|
||||
dict_of_objects = {}
|
||||
value_loaded = jsonutils.loads(value)
|
||||
for key, thing in value_loaded.items():
|
||||
dict_of_objects[key] = child_cls.from_dict(thing)
|
||||
setattr(inst, name, dict_of_objects)
|
||||
elif isinstance(field, obj_fields.DateTimeField):
|
||||
setattr(inst, name, value)
|
||||
else:
|
||||
setattr(inst, name, field.from_primitive(inst, name, value))
|
||||
inst._db_obj = db_obj
|
||||
inst.obj_reset_changes()
|
||||
return inst
|
||||
|
||||
def to_db_obj(self):
|
||||
obj = {}
|
||||
for name, field in self.fields.items():
|
||||
name_ = get_model_field(name)
|
||||
if not self.obj_attr_is_set(name):
|
||||
continue
|
||||
if isinstance(field, obj_fields.ObjectField):
|
||||
obj[name_] = getattr(self, name).to_json()
|
||||
elif isinstance(field, obj_fields.ListOfObjectsField):
|
||||
list_of_objects = []
|
||||
for item in getattr(self, name):
|
||||
list_of_objects.append(item.to_dict())
|
||||
obj[name_] = jsonutils.dumps(list_of_objects)
|
||||
elif isinstance(field, obj_fields.DictOfObjectsField):
|
||||
dict_of_objects = {}
|
||||
for key, item in getattr(self, name).items():
|
||||
dict_of_objects[key] = item.to_dict()
|
||||
obj[name_] = jsonutils.dumps(dict_of_objects)
|
||||
elif isinstance(field, obj_fields.DateTimeField):
|
||||
obj[name_] = getattr(self, name)
|
||||
else:
|
||||
obj[name_] = field.to_primitive(self, name,
|
||||
getattr(self, name))
|
||||
return obj
|
||||
|
||||
|
||||
TackerObjectDictCompat = ovoo_base.VersionedObjectDictCompat
|
tacker/sol_refactored/test-tools/cli.py (new file, 171 lines)
@ -0,0 +1,171 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
from tacker.sol_refactored.common import http_client
|
||||
|
||||
|
||||
# NOTE: This is a tool intended for use with APIs under development.
# It is not maintained at all, but its code can be reused when developing
# new APIs.
|
||||
|
||||
auth_url = "http://127.0.0.1/identity/v3"
|
||||
username = "nfv_user"
|
||||
password = "devstack"
|
||||
project_name = "nfv"
|
||||
domain_name = "Default"
|
||||
user_domain_name = "Default"
|
||||
project_domain_name = "Default"
|
||||
|
||||
|
||||
class Client(object):
|
||||
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self.auth = http_client.KeystonePasswordAuthHandle(
|
||||
auth_url=auth_url,
|
||||
username=username,
|
||||
password=password,
|
||||
project_name=project_name,
|
||||
user_domain_name=user_domain_name,
|
||||
project_domain_name=project_domain_name)
|
||||
|
||||
self.client = http_client.HttpClient(self.auth)
|
||||
|
||||
def print(self, resp, body):
|
||||
print(resp.status_code)
|
||||
print(resp.headers)
|
||||
print()
|
||||
print(body)
|
||||
|
||||
def create(self, req_body):
|
||||
resp, body = self.client.do_request(
|
||||
self.path, "POST", body=req_body, version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
def list(self, req_body):
|
||||
if req_body is not None:
|
||||
resp, body = self.client.do_request(
|
||||
self.path, "GET", version="2.0.0", params=req_body)
|
||||
else:
|
||||
resp, body = self.client.do_request(
|
||||
self.path, "GET", version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
def show(self, id):
|
||||
resp, body = self.client.do_request(
|
||||
self.path + '/' + id, "GET", version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
def delete(self, id):
|
||||
resp, body = self.client.do_request(
|
||||
self.path + '/' + id, "DELETE", version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
def inst(self, id, req_body):
|
||||
path = self.path + '/' + id + '/instantiate'
|
||||
resp, body = self.client.do_request(
|
||||
path, "POST", body=req_body, version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
def term(self, id, req_body):
|
||||
path = self.path + '/' + id + '/terminate'
|
||||
resp, body = self.client.do_request(
|
||||
path, "POST", body=req_body, version="2.0.0")
|
||||
self.print(resp, body)
|
||||
|
||||
|
||||
def usage():
|
||||
print("usage: cli resource action [arg...]")
|
||||
print(" inst create body(path of content)")
|
||||
print(" inst list [body(path of content)]")
|
||||
print(" inst show {id}")
|
||||
print(" inst delete {id}")
|
||||
print(" inst inst {id} body(path of content)")
|
||||
print(" inst term {id} body(path of content)")
|
||||
print(" subsc create body(path of content)")
|
||||
print(" subsc list [body(path of content)]")
|
||||
print(" subsc show {id}")
|
||||
print(" subsc delete {id}")
|
||||
print(" lcmocc list [body(path of content)]")
|
||||
print(" lcmocc show {id}")
|
||||
print(" lcmocc delete {id}")
|
||||
os._exit(1)
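# Example invocations (illustrative, not part of this patch), matching the
# usage text above; file paths and IDs are placeholders.
#
#     python3 cli.py inst create create_req.json
#     python3 cli.py inst inst <instance-id> instantiate_req.json
#     python3 cli.py lcmocc list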
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) < 3:
|
||||
usage()
|
||||
resource = sys.argv[1]
|
||||
action = sys.argv[2]
|
||||
if resource not in ["inst", "subsc", "lcmocc"]:
|
||||
usage()
|
||||
if resource == "inst":
|
||||
if action not in ["create", "list", "show", "delete", "inst", "term"]:
|
||||
usage()
|
||||
if resource == "subsc":
|
||||
if action not in ["create", "list", "show", "delete"]:
|
||||
usage()
|
||||
if resource == "lcmocc":
|
||||
if action not in ["list", "show", "delete"]:
|
||||
usage()
|
||||
if action in ["create", "show", "delete"]:
|
||||
if len(sys.argv) != 4:
|
||||
usage()
|
||||
arg1 = sys.argv[3]
|
||||
elif action in ["inst", "term"]:
|
||||
if len(sys.argv) != 5:
|
||||
usage()
|
||||
arg1 = sys.argv[3]
|
||||
arg2 = sys.argv[4]
|
||||
else: # list
|
||||
arg1 = None
|
||||
if len(sys.argv) == 4:
|
||||
arg1 = sys.argv[3]
|
||||
elif len(sys.argv) != 3:
|
||||
usage()
|
||||
|
||||
if resource == "inst":
|
||||
client = Client("/vnflcm/v2/vnf_instances")
|
||||
elif resource == "subsc":
|
||||
client = Client("/vnflcm/v2/subscriptions")
|
||||
elif resource == "lcmocc":
|
||||
client = Client("/vnflcm/v2/vnf_lcm_op_occs")
|
||||
|
||||
if action == "create":
|
||||
with open(arg1) as fp:
|
||||
body = json.load(fp)
|
||||
client.create(body)
|
||||
elif action == "list":
|
||||
body = None
|
||||
if arg1 is not None:
|
||||
with open(arg1) as fp:
|
||||
body = json.load(fp)
|
||||
client.list(body)
|
||||
elif action == "show":
|
||||
client.show(arg1)
|
||||
elif action == "delete":
|
||||
client.delete(arg1)
|
||||
elif action == "inst":
|
||||
with open(arg2) as fp:
|
||||
body = json.load(fp)
|
||||
client.inst(arg1, body)
|
||||
elif action == "term":
|
||||
with open(arg2) as fp:
|
||||
body = json.load(fp)
|
||||
client.term(arg1, body)
|
tacker/sol_refactored/test-tools/notif_endpoint_app.py (new file, 111 lines)
@ -0,0 +1,111 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
from wsgiref.simple_server import make_server
|
||||
|
||||
|
||||
PORT = 6789
|
||||
|
||||
|
||||
def handle_notification(environ, start_response):
|
||||
method = environ['REQUEST_METHOD']
|
||||
print("notification %s" % method)
|
||||
if method not in ['GET', 'POST']:
|
||||
print(" not support method")
|
||||
start_response('405 not suportted method',
|
||||
[('Content-Type', 'application/problem+json')])
|
||||
problem_detail = {'status': 405,
|
||||
'detail': "not supported method"}
|
||||
body = json.dumps(problem_detail)
|
||||
return [body.encode('utf-8')]
|
||||
|
||||
authorization = environ.get("HTTP_AUTHORIZATION", "")
|
||||
version = environ.get("HTTP_VERSION", "")
|
||||
print(" authorizarion: %s" % authorization)
|
||||
print(" version: %s" % version)
|
||||
|
||||
if method == 'POST':
|
||||
length = environ.get('CONTENT_LENGTH')
|
||||
print(" content_length: %s" % length)
|
||||
body = environ.get('wsgi.input').read(int(length))
|
||||
body = json.loads(body.decode('utf-8'))
|
||||
print(" request body: %s" % body)
|
||||
|
||||
start_response('204 No content', [])
|
||||
return []
|
||||
|
||||
|
||||
def handle_token(environ, start_response):
|
||||
method = environ['REQUEST_METHOD']
|
||||
print("token %s" % method)
|
||||
if method not in ['POST']:
|
||||
print(" not support method")
|
||||
start_response('405 not suportted method',
|
||||
[('Content-Type', 'application/problem+json')])
|
||||
problem_detail = {'status': 405,
|
||||
'detail': "not supported method"}
|
||||
body = json.dumps(problem_detail)
|
||||
return [body.encode('utf-8')]
|
||||
|
||||
authorization = environ.get("HTTP_AUTHORIZATION", "")
|
||||
version = environ.get("HTTP_VERSION", "")
|
||||
content_type = environ.get("CONTENT_TYPE")
|
||||
print(" authorizarion: %s" % authorization)
|
||||
print(" version: %s" % version)
|
||||
print(" content_type: %s" % content_type)
|
||||
|
||||
length = environ.get('CONTENT_LENGTH')
|
||||
print(" content_length: %s" % length)
|
||||
body = environ.get('wsgi.input').read(int(length))
|
||||
body = body.decode('utf-8')
|
||||
print(" request body: %s" % body)
|
||||
|
||||
item = body.split('=')
|
||||
if (len(item) != 2 or
|
||||
item[0] != 'grant_type' or
|
||||
item[1] != 'client_credentials'):
|
||||
start_response('400 Bad Request', [])
|
||||
return []
|
||||
|
||||
start_response('200 OK', [('Content-Type', 'application/json')])
|
||||
data = {
|
||||
"access_token": "2YotnFZFEjr1zCsicMWpAA",
|
||||
"token_type": "example",
|
||||
"expires_in": 3600,
|
||||
"example_parameter": "example_value"
|
||||
}
|
||||
body = json.dumps(data)
|
||||
return [body.encode('utf-8')]
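# Illustrative request (not part of this patch): the handler above expects an
# OAuth2 client-credentials style form body and answers with the fixed token
# document built above, e.g.
#
#     POST /token
#     grant_type=client_credentials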
|
||||
|
||||
|
||||
def notif_endpoint_app(environ, start_response):
|
||||
path = environ['PATH_INFO']
|
||||
|
||||
if path == "/notification":
|
||||
return handle_notification(environ, start_response)
|
||||
|
||||
if path == "/token":
|
||||
return handle_token(environ, start_response)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
with make_server('', PORT, notif_endpoint_app) as httpd:
|
||||
httpd.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
print()
|
||||
print("End.")
|
tacker/tests/functional/sol_v2/__init__.py (new empty file)
tacker/tests/functional/sol_v2/base_v2.py (new file, 198 lines)
@ -0,0 +1,198 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import time
|
||||
import yaml
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log as logging
|
||||
from oslo_utils import uuidutils
|
||||
from tempest.lib import base
|
||||
|
||||
from tacker.sol_refactored.common import http_client
|
||||
from tacker.sol_refactored.infra_drivers.openstack import heat_utils
|
||||
from tacker.sol_refactored import objects
|
||||
from tacker.tests.functional.sol_v2 import utils
|
||||
from tacker.tests import utils as base_utils
|
||||
from tacker import version
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
VNF_PACKAGE_UPLOAD_TIMEOUT = 300
|
||||
|
||||
|
||||
class BaseSolV2Test(base.BaseTestCase):
|
||||
"""Base test case class for SOL v2 functionl tests."""
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(BaseSolV2Test, cls).setUpClass()
|
||||
|
||||
cfg.CONF(args=['--config-file', '/etc/tacker/tacker.conf'],
|
||||
project='tacker',
|
||||
version='%%prog %s' % version.version_info.release_string())
|
||||
objects.register_all(False)
|
||||
|
||||
vim_info = cls.get_vim_info()
|
||||
cls.auth_url = vim_info.interfaceInfo['endpoint']
|
||||
|
||||
auth = http_client.KeystonePasswordAuthHandle(
|
||||
auth_url=vim_info.interfaceInfo['endpoint'],
|
||||
username=vim_info.accessInfo['username'],
|
||||
password=vim_info.accessInfo['password'],
|
||||
project_name=vim_info.accessInfo['project'],
|
||||
user_domain_name=vim_info.accessInfo['userDomain'],
|
||||
project_domain_name=vim_info.accessInfo['projectDomain']
|
||||
)
|
||||
cls.tacker_client = http_client.HttpClient(auth)
|
||||
cls.neutron_client = http_client.HttpClient(auth,
|
||||
service_type='network')
|
||||
cls.heat_client = heat_utils.HeatClient(vim_info)
|
||||
|
||||
@classmethod
|
||||
def get_vim_info(cls):
|
||||
vim_params = yaml.safe_load(base_utils.read_file('local-vim.yaml'))
|
||||
vim_params['auth_url'] += '/v3'
|
||||
|
||||
vim_info = objects.VimConnectionInfo(
|
||||
interfaceInfo={'endpoint': vim_params['auth_url']},
|
||||
accessInfo={
|
||||
'region': 'RegionOne',
|
||||
'project': vim_params['project_name'],
|
||||
'username': vim_params['username'],
|
||||
'password': vim_params['password'],
|
||||
'userDomain': vim_params['user_domain_name'],
|
||||
'projectDomain': vim_params['project_domain_name']
|
||||
}
|
||||
)
|
||||
|
||||
return vim_info
|
||||
|
||||
@classmethod
|
||||
def create_vnf_package(cls, sample_path, user_data={}, image_path=None):
|
||||
vnfd_id = uuidutils.generate_uuid()
|
||||
tmp_dir = tempfile.mkdtemp()
|
||||
|
||||
utils.make_zip(sample_path, tmp_dir, vnfd_id, image_path)
|
||||
|
||||
zip_file_name = os.path.basename(os.path.abspath(sample_path)) + ".zip"
|
||||
zip_file_path = os.path.join(tmp_dir, zip_file_name)
|
||||
|
||||
path = "/vnfpkgm/v1/vnf_packages"
|
||||
req_body = {'userDefinedData': user_data}
|
||||
resp, body = cls.tacker_client.do_request(
|
||||
path, "POST", expected_status=[201], body=req_body)
|
||||
|
||||
pkg_id = body['id']
|
||||
|
||||
with open(zip_file_path, 'rb') as fp:
|
||||
path = "/vnfpkgm/v1/vnf_packages/{}/package_content".format(pkg_id)
|
||||
resp, body = cls.tacker_client.do_request(
|
||||
path, "PUT", body=fp, content_type='application/zip',
|
||||
expected_status=[202])
|
||||
|
||||
# wait for onboard
|
||||
timeout = VNF_PACKAGE_UPLOAD_TIMEOUT
|
||||
start_time = int(time.time())
|
||||
path = "/vnfpkgm/v1/vnf_packages/{}".format(pkg_id)
|
||||
while True:
|
||||
resp, body = cls.tacker_client.do_request(
|
||||
path, "GET", expected_status=[200])
|
||||
if body['onboardingState'] == "ONBOARDED":
|
||||
break
|
||||
|
||||
if ((int(time.time()) - start_time) > timeout):
|
||||
raise Exception("Failed to onboard vnf package")
|
||||
|
||||
time.sleep(5)
|
||||
|
||||
shutil.rmtree(tmp_dir)
|
||||
|
||||
return pkg_id, vnfd_id
|
||||
|
||||
@classmethod
|
||||
def delete_vnf_package(cls, pkg_id):
|
||||
path = "/vnfpkgm/v1/vnf_packages/{}".format(pkg_id)
|
||||
req_body = {"operationalState": "DISABLED"}
|
||||
resp, _ = cls.tacker_client.do_request(
|
||||
path, "PATCH", body=req_body)
|
||||
if resp.status_code != 200:
|
||||
LOG.error("failed to set operationalState to DISABLED")
|
||||
return
|
||||
|
||||
cls.tacker_client.do_request(path, "DELETE")
|
||||
|
||||
def get_network_ids(self, networks):
|
||||
path = "/v2.0/networks"
|
||||
resp, body = self.neutron_client.do_request(path, "GET")
|
||||
net_ids = {}
|
||||
for net in body['networks']:
|
||||
if net['name'] in networks:
|
||||
net_ids[net['name']] = net['id']
|
||||
return net_ids
|
||||
|
||||
def get_subnet_ids(self, subnets):
|
||||
path = "/v2.0/subnets"
|
||||
resp, body = self.neutron_client.do_request(path, "GET")
|
||||
subnet_ids = {}
|
||||
for subnet in body['subnets']:
|
||||
if subnet['name'] in subnets:
|
||||
subnet_ids[subnet['name']] = subnet['id']
|
||||
return subnet_ids
|
||||
|
||||
def create_vnf_instance(self, req_body):
|
||||
path = "/vnflcm/v2/vnf_instances"
|
||||
return self.tacker_client.do_request(
|
||||
path, "POST", body=req_body, version="2.0.0")
|
||||
|
||||
def delete_vnf_instance(self, inst_id):
|
||||
path = "/vnflcm/v2/vnf_instances/{}".format(inst_id)
|
||||
return self.tacker_client.do_request(
|
||||
path, "DELETE", version="2.0.0")
|
||||
|
||||
def show_vnf_instance(self, inst_id):
|
||||
path = "/vnflcm/v2/vnf_instances/{}".format(inst_id)
|
||||
return self.tacker_client.do_request(
|
||||
path, "GET", version="2.0.0")
|
||||
|
||||
def instantiate_vnf_instance(self, inst_id, req_body):
|
||||
path = "/vnflcm/v2/vnf_instances/{}/instantiate".format(inst_id)
|
||||
return self.tacker_client.do_request(
|
||||
path, "POST", body=req_body, version="2.0.0")
|
||||
|
||||
def terminate_vnf_instance(self, inst_id, req_body):
|
||||
path = "/vnflcm/v2/vnf_instances/{}/terminate".format(inst_id)
|
||||
return self.tacker_client.do_request(
|
||||
path, "POST", body=req_body, version="2.0.0")
|
||||
|
||||
def wait_lcmocc_complete(self, lcmocc_id):
|
||||
# NOTE: It is not necessary to set timeout because the operation
|
||||
# itself sets a timeout and the state will become 'FAILED_TEMP'.
|
||||
path = "/vnflcm/v2/vnf_lcm_op_occs/{}".format(lcmocc_id)
|
||||
while True:
|
||||
time.sleep(5)
|
||||
_, body = self.tacker_client.do_request(
|
||||
path, "GET", expected_status=[200], version="2.0.0")
|
||||
state = body['operationState']
|
||||
if state == 'COMPLETED':
|
||||
return
|
||||
elif state in ['STARTING', 'PROCESSING']:
|
||||
continue
|
||||
else: # FAILED_TEMP or ROLLED_BACK
|
||||
raise Exception("Operation failed. state: %s" % state)
|
tacker/tests/functional/sol_v2/paramgen.py (new file, 241 lines)
@ -0,0 +1,241 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from oslo_utils import uuidutils
|
||||
|
||||
|
||||
def sample1_create(vnfd_id):
|
||||
return {
|
||||
"vnfdId": vnfd_id,
|
||||
"vnfInstanceName": "sample1",
|
||||
"vnfInstanceDescription": "test sample1"
|
||||
}
|
||||
|
||||
|
||||
def sample1_terminate():
|
||||
return {
|
||||
"terminationType": "FORCEFUL"
|
||||
}
|
||||
|
||||
|
||||
def sample1_instantiate(net_ids, subnet_ids, auth_url):
|
||||
ext_vl_1 = {
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"resourceId": net_ids['net0'],
|
||||
"extCps": [
|
||||
{
|
||||
"cpdId": "VDU1_CP1",
|
||||
"cpConfig": {
|
||||
"VDU1_CP1_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"numDynamicAddresses": 1}]}}]}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cpdId": "VDU2_CP1",
|
||||
"cpConfig": {
|
||||
"VDU2_CP1_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"fixedAddresses": ["10.10.0.101"]}]}}]}
|
||||
}
|
||||
}
|
||||
],
|
||||
}
|
||||
ext_vl_2 = {
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"resourceId": net_ids['net1'],
|
||||
"extCps": [
|
||||
{
|
||||
"cpdId": "VDU1_CP2",
|
||||
"cpConfig": {
|
||||
"VDU1_CP2_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"numDynamicAddresses": 1,
|
||||
"subnetId": subnet_ids['subnet1']}]}}]}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cpdId": "VDU2_CP2",
|
||||
"cpConfig": {
|
||||
"VDU2_CP2_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"fixedAddresses": ["10.10.1.101"],
|
||||
"subnetId": subnet_ids['subnet1']}]}}]}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
return {
|
||||
"flavourId": "simple",
|
||||
"instantiationLevelId": "instantiation_level_1",
|
||||
"extVirtualLinks": [
|
||||
ext_vl_1,
|
||||
ext_vl_2
|
||||
],
|
||||
"extManagedVirtualLinks": [
|
||||
{
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"vnfVirtualLinkDescId": "internalVL1",
|
||||
"resourceId": net_ids['net_mgmt']
|
||||
},
|
||||
],
|
||||
"vimConnectionInfo": {
|
||||
"vim1": {
|
||||
"vimType": "ETSINFV.OPENSTACK_KEYSTONE.V_3",
|
||||
"vimId": uuidutils.generate_uuid(),
|
||||
"interfaceInfo": {"endpoint": auth_url},
|
||||
"accessInfo": {
|
||||
"username": "nfv_user",
|
||||
"region": "RegionOne",
|
||||
"password": "devstack",
|
||||
"project": "nfv",
|
||||
"projectDomain": "Default",
|
||||
"userDomain": "Default"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def sample2_create(vnfd_id):
|
||||
return {
|
||||
"vnfdId": vnfd_id,
|
||||
"vnfInstanceName": "sample2",
|
||||
"vnfInstanceDescription": "test sample2"
|
||||
}
|
||||
|
||||
|
||||
def sample2_terminate():
|
||||
return {
|
||||
"terminationType": "GRACEFUL",
|
||||
"gracefulTerminationTimeout": 5
|
||||
}
|
||||
|
||||
|
||||
def sample2_instantiate(net_ids, subnet_ids, auth_url):
|
||||
ext_vl_1 = {
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"resourceId": net_ids['net0'],
|
||||
"extCps": [
|
||||
{
|
||||
"cpdId": "VDU1_CP1",
|
||||
"cpConfig": {
|
||||
"VDU1_CP1_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"numDynamicAddresses": 1}]}}]}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cpdId": "VDU2_CP1",
|
||||
"cpConfig": {
|
||||
"VDU2_CP1_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"fixedAddresses": ["10.10.0.102"]}]}}]}
|
||||
}
|
||||
}
|
||||
],
|
||||
}
|
||||
ext_vl_2 = {
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"resourceId": net_ids['net1'],
|
||||
"extCps": [
|
||||
{
|
||||
"cpdId": "VDU1_CP2",
|
||||
"cpConfig": {
|
||||
"VDU1_CP2_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"numDynamicAddresses": 1,
|
||||
"subnetId": subnet_ids['subnet1']}]}}]}
|
||||
}
|
||||
},
|
||||
{
|
||||
"cpdId": "VDU2_CP2",
|
||||
"cpConfig": {
|
||||
"VDU2_CP2_1": {
|
||||
"cpProtocolData": [{
|
||||
"layerProtocol": "IP_OVER_ETHERNET",
|
||||
"ipOverEthernet": {
|
||||
"ipAddresses": [{
|
||||
"type": "IPV4",
|
||||
"fixedAddresses": ["10.10.1.102"],
|
||||
"subnetId": subnet_ids['subnet1']}]}}]}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
return {
|
||||
"flavourId": "simple",
|
||||
"instantiationLevelId": "instantiation_level_1",
|
||||
"extVirtualLinks": [
|
||||
ext_vl_1,
|
||||
ext_vl_2
|
||||
],
|
||||
"extManagedVirtualLinks": [
|
||||
{
|
||||
"id": uuidutils.generate_uuid(),
|
||||
"vnfVirtualLinkDescId": "internalVL1",
|
||||
"resourceId": net_ids['net_mgmt']
|
||||
},
|
||||
],
|
||||
"vimConnectionInfo": {
|
||||
"vim1": {
|
||||
"vimType": "ETSINFV.OPENSTACK_KEYSTONE.V_3",
|
||||
"vimId": uuidutils.generate_uuid(),
|
||||
"interfaceInfo": {"endpoint": auth_url},
|
||||
"accessInfo": {
|
||||
"username": "nfv_user",
|
||||
"region": "RegionOne",
|
||||
"password": "devstack",
|
||||
"project": "nfv",
|
||||
"projectDomain": "Default",
|
||||
"userDomain": "Default"
|
||||
}
|
||||
}
|
||||
},
|
||||
"additionalParams": {
|
||||
"lcm-operation-user-data": "./UserData/userdata_default.py",
|
||||
"lcm-operation-user-data-class": "DefaultUserData"
|
||||
}
|
||||
}
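# Illustrative usage (not part of this patch): the functional tests are
# expected to resolve network/subnet names to IDs first and then build the
# request bodies with the generators above; names here are placeholders.
#
#     net_ids = self.get_network_ids(['net0', 'net1', 'net_mgmt'])
#     subnet_ids = self.get_subnet_ids(['subnet1'])
#     inst_req = paramgen.sample1_instantiate(net_ids, subnet_ids, self.auth_url)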
|
@ -0,0 +1,87 @@
|
||||
heat_template_version: 2013-05-23
|
||||
description: 'VDU1 HOT for Sample VNF'
|
||||
|
||||
parameters:
|
||||
flavor:
|
||||
type: string
|
||||
image:
|
||||
type: string
|
||||
zone:
|
||||
type: string
|
||||
net1:
|
||||
type: string
|
||||
net2:
|
||||
type: string
|
||||
net3:
|
||||
type: string
|
||||
net4:
|
||||
type: string
|
||||
net5:
|
||||
type: string
|
||||
subnet:
|
||||
type: string
|
||||
|
||||
resources:
|
||||
VDU1:
|
||||
type: OS::Nova::Server
|
||||
properties:
|
||||
flavor: { get_param: flavor }
|
||||
name: VDU1
|
||||
block_device_mapping_v2: [{"volume_id": { get_resource: VirtualStorage }}]
|
||||
networks:
|
||||
- port:
|
||||
get_resource: VDU1_CP1
|
||||
- port:
|
||||
get_resource: VDU1_CP2
|
||||
# replace the following line with the Port ID when extmanagedVLs' Ports are specified in instantiatevnfrequest
|
||||
- port:
|
||||
get_resource: VDU1_CP3
|
||||
- port:
|
||||
get_resource: VDU1_CP4
|
||||
- port:
|
||||
get_resource: VDU1_CP5
|
||||
|
||||
|
||||
availability_zone: { get_param: zone }
|
||||
|
||||
VirtualStorage:
|
||||
type: OS::Cinder::Volume
|
||||
properties:
|
||||
image: { get_param: image }
|
||||
size: 1
|
||||
volume_type: { get_resource: multi }
|
||||
multi:
|
||||
type: OS::Cinder::VolumeType
|
||||
properties:
|
||||
name: { get_resource: VDU1_CP1 }
|
||||
metadata: { multiattach: "<is> True" }
|
||||
|
||||
# extVL without FixedIP or with numDynamicAddresses
|
||||
VDU1_CP1:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net1 }
|
||||
|
||||
# extVL with numDynamicAddresses and subnet
|
||||
VDU1_CP2:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net2 }
|
||||
fixed_ips:
|
||||
- subnet: { get_param: subnet}
|
||||
|
||||
# delete the following line when extmanagedVLs' Ports are specified in instantiatevnfrequest
|
||||
VDU1_CP3:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net3 }
|
||||
|
||||
VDU1_CP4:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net4 }
|
||||
|
||||
VDU1_CP5:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net5 }
|
@ -0,0 +1,125 @@
|
||||
heat_template_version: 2013-05-23
|
||||
description: 'Simple Base HOT for Sample VNF'
|
||||
|
||||
parameters:
|
||||
nfv:
|
||||
type: json
|
||||
|
||||
resources:
|
||||
VDU1_scale_group:
|
||||
type: OS::Heat::AutoScalingGroup
|
||||
properties:
|
||||
min_size: 1
|
||||
max_size: 3
|
||||
desired_capacity: 1
|
||||
resource:
|
||||
type: VDU1.yaml
|
||||
properties:
|
||||
flavor: { get_param: [ nfv, VDU, VDU1, computeFlavourId ] }
|
||||
image: { get_param: [ nfv, VDU, VirtualStorage, vcImageId ] }
|
||||
zone: { get_param: [ nfv, VDU, VDU1, locationConstraints] }
|
||||
net1: { get_param: [ nfv, CP, VDU1_CP1, network] }
|
||||
net2: { get_param: [ nfv, CP, VDU1_CP2, network ] }
|
||||
subnet: { get_param: [nfv, CP, VDU1_CP2, fixed_ips, 0, subnet ]}
|
||||
net3: { get_resource: internalVL1 }
|
||||
net4: { get_resource: internalVL2 }
|
||||
net5: { get_resource: internalVL3 }
|
||||
|
||||
VDU1_scale_out:
|
||||
type: OS::Heat::ScalingPolicy
|
||||
properties:
|
||||
scaling_adjustment: 1
|
||||
auto_scaling_group_id:
|
||||
get_resource: VDU1_scale_group
|
||||
adjustment_type: change_in_capacity
|
||||
VDU1_scale_in:
|
||||
type: OS::Heat::ScalingPolicy
|
||||
properties:
|
||||
scaling_adjustment: -1
|
||||
auto_scaling_group_id:
|
||||
get_resource: VDU1_scale_group
|
||||
adjustment_type: change_in_capacity
|
||||
|
||||
VDU2:
|
||||
type: OS::Nova::Server
|
||||
properties:
|
||||
flavor: { get_param: [ nfv, VDU, VDU2, computeFlavourId ] }
|
||||
image: { get_param: [ nfv, VDU, VDU2, vcImageId] }
|
||||
availability_zone: { get_param: [ nfv, VDU, VDU2, locationConstraints ] }
|
||||
networks:
|
||||
- port:
|
||||
get_resource: VDU2_CP1
|
||||
- port:
|
||||
get_resource: VDU2_CP2
|
||||
- port:
|
||||
get_resource: VDU2_CP3
|
||||
- port:
|
||||
get_resource: VDU2_CP4
|
||||
- port:
|
||||
get_resource: VDU2_CP5
|
||||
|
||||
# extVL with FixedIP
|
||||
VDU2_CP1:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: [ nfv, CP, VDU2_CP1, network ] }
|
||||
fixed_ips:
|
||||
- ip_address: { get_param: [nfv, CP, VDU2_CP1, fixed_ips, 0, ip_address]}
|
||||
|
||||
# extVL with FixedIP and Subnet
|
||||
VDU2_CP2:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: [ nfv, CP, VDU2_CP2, network ] }
|
||||
fixed_ips:
|
||||
- ip_address: { get_param: [nfv, CP, VDU2_CP2, fixed_ips, 0, ip_address]}
|
||||
subnet: { get_param: [nfv, CP, VDU2_CP2, fixed_ips, 0, subnet]}
|
||||
|
||||
VDU2_CP3:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in instantiatevnfrequest
|
||||
network: { get_resource: internalVL1 }
|
||||
|
||||
VDU2_CP4:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in instantiatevnfrequest
|
||||
network: { get_resource: internalVL2 }
|
||||
|
||||
VDU2_CP5:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in instantiatevnfrequest
|
||||
network: { get_resource: internalVL3 }
|
||||
|
||||
# delete the following lines when extmanagedVLs are specified in instantiatevnfrequest
|
||||
internalVL1:
|
||||
type: OS::Neutron::Net
|
||||
internalVL2:
|
||||
type: OS::Neutron::Net
|
||||
internalVL3:
|
||||
type: OS::Neutron::Net
|
||||
|
||||
internalVL1_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL1
|
||||
cidr: 192.168.3.0/24
|
||||
internalVL2_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL2
|
||||
cidr: 192.168.4.0/24
|
||||
internalVL3_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL3
|
||||
cidr: 192.168.5.0/24
|
||||
outputs: {}
|
@ -0,0 +1,202 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
description: ETSI NFV SOL 001 common types definitions version 2.6.1
|
||||
metadata:
|
||||
template_name: etsi_nfv_sol001_common_types
|
||||
template_author: ETSI_NFV
|
||||
template_version: 2.6.1
|
||||
|
||||
data_types:
|
||||
tosca.datatypes.nfv.L2AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes the information on the MAC addresses to be assigned to a connection point.
|
||||
properties:
|
||||
mac_address_assignment:
|
||||
type: boolean
|
||||
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
|
||||
required: true
|
||||
|
||||
tosca.datatypes.nfv.L3AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
|
||||
properties:
|
||||
ip_address_assignment:
|
||||
type: boolean
|
||||
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
|
||||
required: true
|
||||
floating_ip_activated:
|
||||
type: boolean
|
||||
description: Specifies if the floating IP scheme is activated on the Connection Point or not
|
||||
required: true
|
||||
ip_address_type:
|
||||
type: string
|
||||
description: Defines address type. The address type should be aligned with the address type supported by the layer_protocols properties of the parent VnfExtCp
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ ipv4, ipv6 ]
|
||||
number_of_ip_address:
|
||||
type: integer
|
||||
description: Minimum number of IP addresses to be assigned
|
||||
required: false
|
||||
constraints:
|
||||
- greater_than: 0
|
||||
|
||||
tosca.datatypes.nfv.AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes information about the addressing scheme and parameters applicable to a CP
|
||||
properties:
|
||||
address_type:
|
||||
type: string
|
||||
description: Describes the type of the address to be assigned to a connection point. The content type shall be aligned with the address type supported by the layerProtocol property of the connection point
|
||||
required: true
|
||||
constraints:
|
||||
- valid_values: [ mac_address, ip_address ]
|
||||
l2_address_data:
|
||||
type: tosca.datatypes.nfv.L2AddressData
|
||||
description: Provides the information on the MAC addresses to be assigned to a connection point.
|
||||
required: false
|
||||
l3_address_data:
|
||||
type: tosca.datatypes.nfv.L3AddressData
|
||||
description: Provides the information on the IP addresses to be assigned to a connection point
|
||||
required: false
|
||||
|
||||
tosca.datatypes.nfv.ConnectivityType:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes additional connectivity information of a virtualLink
|
||||
properties:
|
||||
layer_protocols:
|
||||
type: list
|
||||
description: Identifies the protocol a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire).The top layer protocol of the virtualLink protocol stack shall always be provided. The lower layer protocols may be included when there are specific requirements on these layers.
|
||||
required: true
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
flow_pattern:
|
||||
type: string
|
||||
description: Identifies the flow pattern of the connectivity
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ line, tree, mesh ]
|
||||
|
||||
tosca.datatypes.nfv.LinkBitrateRequirements:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes the requirements in terms of bitrate for a virtual link
|
||||
properties:
|
||||
root:
|
||||
type: integer # in bits per second
|
||||
description: Specifies the throughput requirement in bits per second of the link (e.g. bitrate of E-Line, root bitrate of E-Tree, aggregate capacity of E-LAN).
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
leaf:
|
||||
type: integer # in bits per second
|
||||
description: Specifies the throughput requirement in bits per second of leaf connections to the link when applicable to the connectivity type (e.g. for E-Tree and E LAN branches).
|
||||
required: false
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
|
||||
tosca.datatypes.nfv.CpProtocolData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information
|
||||
properties:
|
||||
associated_layer_protocol:
|
||||
type: string
|
||||
required: true
|
||||
description: One of the values of the property layer_protocols of the CP
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
address_data:
|
||||
type: list
|
||||
description: Provides information on the addresses to be assigned to the CP
|
||||
entry_schema:
|
||||
type: tosca.datatypes.nfv.AddressData
|
||||
required: false
|
||||
|
||||
tosca.datatypes.nfv.VnfProfile:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF.
|
||||
properties:
|
||||
instantiation_level:
|
||||
type: string
|
||||
description: Identifier of the instantiation level of the VNF DF to be used for instantiation. If not present, the default instantiation level as declared in the VNFD shall be used.
|
||||
required: false
|
||||
min_number_of_instances:
|
||||
type: integer
|
||||
description: Minimum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
max_number_of_instances:
|
||||
type: integer
|
||||
description: Maximum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
|
||||
tosca.datatypes.nfv.Qos:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes QoS data for a given VL used in a VNF deployment flavour
|
||||
properties:
|
||||
latency:
|
||||
type: scalar-unit.time #Number
|
||||
description: Specifies the maximum latency
|
||||
required: true
|
||||
constraints:
|
||||
- greater_than: 0 s
|
||||
packet_delay_variation:
|
||||
type: scalar-unit.time #Number
|
||||
description: Specifies the maximum jitter
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0 s
|
||||
packet_loss_ratio:
|
||||
type: float
|
||||
description: Specifies the maximum packet loss ratio
|
||||
required: false
|
||||
constraints:
|
||||
- in_range: [ 0.0, 1.0 ]
|
||||
|
||||
capability_types:
|
||||
tosca.capabilities.nfv.VirtualLinkable:
|
||||
derived_from: tosca.capabilities.Node
|
||||
description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type
|
||||
|
||||
relationship_types:
|
||||
tosca.relationships.nfv.VirtualLinksTo:
|
||||
derived_from: tosca.relationships.DependsOn
|
||||
description: Represents an association relationship between the VduCp and VnfVirtualLink node types
|
||||
valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
|
||||
|
||||
node_types:
|
||||
tosca.nodes.nfv.Cp:
|
||||
derived_from: tosca.nodes.Root
|
||||
description: Provides information regarding the purpose of the connection point
|
||||
properties:
|
||||
layer_protocols:
|
||||
type: list
|
||||
description: Identifies which protocol the connection point uses for connectivity purposes
|
||||
required: true
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
|
||||
type: string
|
||||
description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ root, leaf ]
|
||||
description:
|
||||
type: string
|
||||
description: Provides human-readable information on the purpose of the connection point
|
||||
required: false
|
||||
protocol:
|
||||
type: list
|
||||
description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
|
||||
required: false
|
||||
entry_schema:
|
||||
type: tosca.datatypes.nfv.CpProtocolData
|
||||
trunk_mode:
|
||||
type: boolean
|
||||
description: Provides information about whether the CP instantiated from this Cp is in Trunk mode (802.1Q or other), When operating in "trunk mode", the Cp is capable of carrying traffic for several VLANs. Absence of this property implies that trunkMode is not configured for the Cp i.e. It is equivalent to boolean value "false".
|
||||
required: false
|
File diff suppressed because it is too large
@ -0,0 +1,406 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: Simple deployment flavour for Sample VNF
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
- v2_sample1_types.yaml
|
||||
|
||||
topology_template:
|
||||
inputs:
|
||||
descriptor_id:
|
||||
type: string
|
||||
descriptor_version:
|
||||
type: string
|
||||
provider:
|
||||
type: string
|
||||
product_name:
|
||||
type: string
|
||||
software_version:
|
||||
type: string
|
||||
vnfm_info:
|
||||
type: list
|
||||
entry_schema:
|
||||
type: string
|
||||
flavour_id:
|
||||
type: string
|
||||
flavour_description:
|
||||
type: string
|
||||
|
||||
substitution_mappings:
|
||||
node_type: company.provider.VNF
|
||||
properties:
|
||||
flavour_id: simple
|
||||
requirements:
|
||||
virtual_link_external1_1: [ VDU1_CP1, virtual_link ]
|
||||
virtual_link_external1_2: [ VDU2_CP1, virtual_link ]
|
||||
virtual_link_external2_1: [ VDU1_CP2, virtual_link ]
|
||||
virtual_link_external2_2: [ VDU2_CP2, virtual_link ]
|
||||
|
||||
node_templates:
|
||||
VNF:
|
||||
type: company.provider.VNF
|
||||
properties:
|
||||
flavour_description: A simple flavour
|
||||
interfaces:
|
||||
Vnflcm:
|
||||
instantiate: []
|
||||
instantiate_start: []
|
||||
instantiate_end: []
|
||||
terminate: []
|
||||
terminate_start: []
|
||||
terminate_end: []
|
||||
modify_information: []
|
||||
modify_information_start: []
|
||||
modify_information_end: []
|
||||
|
||||
VDU1:
|
||||
type: tosca.nodes.nfv.Vdu.Compute
|
||||
properties:
|
||||
name: VDU1
|
||||
description: VDU1 compute node
|
||||
vdu_profile:
|
||||
min_number_of_instances: 1
|
||||
max_number_of_instances: 3
|
||||
capabilities:
|
||||
virtual_compute:
|
||||
properties:
|
||||
requested_additional_capabilities:
|
||||
properties:
|
||||
requested_additional_capability_name: m1.tiny
|
||||
support_mandatory: true
|
||||
target_performance_parameters:
|
||||
entry_schema: test
|
||||
virtual_memory:
|
||||
virtual_mem_size: 512 MB
|
||||
virtual_cpu:
|
||||
num_virtual_cpu: 1
|
||||
virtual_local_storage:
|
||||
- size_of_storage: 3 GB
|
||||
requirements:
|
||||
- virtual_storage: VirtualStorage
|
||||
|
||||
VDU2:
|
||||
type: tosca.nodes.nfv.Vdu.Compute
|
||||
properties:
|
||||
name: VDU2
|
||||
description: VDU2 compute node
|
||||
vdu_profile:
|
||||
min_number_of_instances: 1
|
||||
max_number_of_instances: 1
|
||||
sw_image_data:
|
||||
name: VDU2-image
|
||||
version: '0.5.2'
|
||||
checksum:
|
||||
algorithm: sha-256
|
||||
hash: 932fcae93574e242dc3d772d5235061747dfe537668443a1f0567d893614b464
|
||||
container_format: bare
|
||||
disk_format: qcow2
|
||||
min_disk: 0 GB
|
||||
min_ram: 256 MB
|
||||
size: 12 GB
|
||||
capabilities:
|
||||
virtual_compute:
|
||||
properties:
|
||||
requested_additional_capabilities:
|
||||
properties:
|
||||
requested_additional_capability_name: m1.tiny
|
||||
support_mandatory: true
|
||||
target_performance_parameters:
|
||||
entry_schema: test
|
||||
virtual_memory:
|
||||
virtual_mem_size: 512 MB
|
||||
virtual_cpu:
|
||||
num_virtual_cpu: 1
|
||||
virtual_local_storage:
|
||||
- size_of_storage: 3 GB
|
||||
artifacts:
|
||||
sw_image:
|
||||
type: tosca.artifacts.nfv.SwImage
|
||||
file: ../Files/images/cirros-0.5.2-x86_64-disk.img
|
||||
|
||||
VirtualStorage:
|
||||
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
|
||||
properties:
|
||||
virtual_block_storage_data:
|
||||
size_of_storage: 1 GB
|
||||
rdma_enabled: true
|
||||
sw_image_data:
|
||||
name: cirros-0.5.2-x86_64-disk
|
||||
version: '0.5.2'
|
||||
checksum:
|
||||
algorithm: sha-256
|
||||
hash: 932fcae93574e242dc3d772d5235061747dfe537668443a1f0567d893614b464
|
||||
container_format: bare
|
||||
disk_format: qcow2
|
||||
min_disk: 0 GB
|
||||
min_ram: 256 MB
|
||||
size: 12 GB
|
||||
|
||||
VDU1_CP1:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 0
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
|
||||
VDU1_CP2:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 1
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
|
||||
VDU1_CP3:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 2
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL1
|
||||
|
||||
VDU1_CP4:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 3
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL2
|
||||
|
||||
VDU1_CP5:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 4
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL3
|
||||
|
||||
VDU2_CP1:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 0
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
|
||||
VDU2_CP2:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 1
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
|
||||
VDU2_CP3:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 2
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL1
|
||||
|
||||
VDU2_CP4:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 3
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL2
|
||||
|
||||
VDU2_CP5:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 4
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL3
|
||||
|
||||
internalVL1:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: External Managed Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.3.0/24
|
||||
|
||||
internalVL2:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: External Managed Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.4.0/24
|
||||
|
||||
internalVL3:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: Internal Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.5.0/24
|
||||
|
||||
groups:
|
||||
affinityOrAntiAffinityGroup1:
|
||||
type: tosca.groups.nfv.PlacementGroup
|
||||
members: [ VDU1, VDU2 ]
|
||||
|
||||
policies:
|
||||
- scaling_aspects:
|
||||
type: tosca.policies.nfv.ScalingAspects
|
||||
properties:
|
||||
aspects:
|
||||
VDU1_scale:
|
||||
name: VDU1_scale
|
||||
description: VDU1 scaling aspect
|
||||
max_scale_level: 2
|
||||
step_deltas:
|
||||
- delta_1
|
||||
|
||||
- VDU1_initial_delta:
|
||||
type: tosca.policies.nfv.VduInitialDelta
|
||||
properties:
|
||||
initial_delta:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- VDU2_initial_delta:
|
||||
type: tosca.policies.nfv.VduInitialDelta
|
||||
properties:
|
||||
initial_delta:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU2 ]
|
||||
|
||||
- VDU1_scaling_aspect_deltas:
|
||||
type: tosca.policies.nfv.VduScalingAspectDeltas
|
||||
properties:
|
||||
aspect: VDU1_scale
|
||||
deltas:
|
||||
delta_1:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- instantiation_levels:
|
||||
type: tosca.policies.nfv.InstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
description: Smallest size
|
||||
scale_info:
|
||||
VDU1_scale:
|
||||
scale_level: 0
|
||||
instantiation_level_2:
|
||||
description: Largest size
|
||||
scale_info:
|
||||
VDU1_scale:
|
||||
scale_level: 2
|
||||
default_level: instantiation_level_1
|
||||
|
||||
- VDU1_instantiation_levels:
|
||||
type: tosca.policies.nfv.VduInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
number_of_instances: 1
|
||||
instantiation_level_2:
|
||||
number_of_instances: 3
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- VDU2_instantiation_levels:
|
||||
type: tosca.policies.nfv.VduInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
number_of_instances: 1
|
||||
instantiation_level_2:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU2 ]
|
||||
|
||||
- internalVL1_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL1 ]
|
||||
|
||||
- internalVL2_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL2 ]
|
||||
|
||||
- internalVL3_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL3 ]
|
||||
|
||||
- policy_antiaffinity_group:
|
||||
type: tosca.policies.nfv.AntiAffinityRule
|
||||
targets: [ affinityOrAntiAffinityGroup1 ]
|
||||
properties:
|
||||
scope: zone
|
@ -0,0 +1,31 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: Sample VNF
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
- v2_sample1_types.yaml
|
||||
- v2_sample1_df_simple.yaml
|
||||
|
||||
topology_template:
|
||||
inputs:
|
||||
selected_flavour:
|
||||
type: string
|
||||
description: VNF deployment flavour selected by the consumer. It is provided in the API
|
||||
|
||||
node_templates:
|
||||
VNF:
|
||||
type: company.provider.VNF
|
||||
properties:
|
||||
flavour_id: { get_input: selected_flavour }
|
||||
descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d7000000
|
||||
provider: Company
|
||||
product_name: Sample VNF
|
||||
software_version: '1.0'
|
||||
descriptor_version: '1.0'
|
||||
vnfm_info:
|
||||
- Tacker
|
||||
requirements:
|
||||
#- virtual_link_external # mapped in lower-level templates
|
||||
#- virtual_link_internal # mapped in lower-level templates
|
@ -0,0 +1,55 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: VNF type definition
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
|
||||
node_types:
|
||||
company.provider.VNF:
|
||||
derived_from: tosca.nodes.nfv.VNF
|
||||
properties:
|
||||
descriptor_id:
|
||||
type: string
|
||||
constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d7000000 ] ]
|
||||
default: b1bb0ce7-ebca-4fa7-95ed-4840d7000000
|
||||
descriptor_version:
|
||||
type: string
|
||||
constraints: [ valid_values: [ '1.0' ] ]
|
||||
default: '1.0'
|
||||
provider:
|
||||
type: string
|
||||
constraints: [ valid_values: [ 'Company' ] ]
|
||||
default: 'Company'
|
||||
product_name:
|
||||
type: string
|
||||
constraints: [ valid_values: [ 'Sample VNF' ] ]
|
||||
default: 'Sample VNF'
|
||||
software_version:
|
||||
type: string
|
||||
constraints: [ valid_values: [ '1.0' ] ]
|
||||
default: '1.0'
|
||||
vnfm_info:
|
||||
type: list
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints: [ valid_values: [ Tacker ] ]
|
||||
default: [ Tacker ]
|
||||
flavour_id:
|
||||
type: string
|
||||
constraints: [ valid_values: [ simple ] ]
|
||||
default: simple
|
||||
flavour_description:
|
||||
type: string
|
||||
default: "flavour"
|
||||
requirements:
|
||||
- virtual_link_external1:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
- virtual_link_external2:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
- virtual_link_internal:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
interfaces:
|
||||
Vnflcm:
|
||||
type: tosca.interfaces.nfv.Vnflcm
|
@ -0,0 +1,4 @@
|
||||
TOSCA-Meta-File-Version: 1.0
|
||||
CSAR-Version: 1.1
|
||||
Created-by: Onboarding portal
|
||||
Entry-Definitions: Definitions/v2_sample1_top.vnfd.yaml
|
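The TOSCA.meta above makes Definitions/v2_sample1_top.vnfd.yaml the entry point of the package. For orientation, the sample1 CSAR is laid out roughly as follows; the tree is reconstructed from the imports and artifact paths in this change (the BaseHOT/<flavour> convention is Tacker's, and the image file is presumably copied in when the zip is built rather than kept in the source tree):

sample1/contents/
    TOSCA-Metadata/TOSCA.meta
    Definitions/v2_sample1_top.vnfd.yaml        (entry definitions)
    Definitions/v2_sample1_types.yaml
    Definitions/v2_sample1_df_simple.yaml
    Definitions/etsi_nfv_sol001_common_types.yaml
    Definitions/etsi_nfv_sol001_vnfd_types.yaml
    BaseHOT/simple/                             (base HOT plus nested VDU1.yaml)
    Files/images/cirros-0.5.2-x86_64-disk.img   (added by the package generator)
sample1/pkggen.py                               (package/request generator, below)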
58
tacker/tests/functional/sol_v2/samples/sample1/pkggen.py
Normal file
@ -0,0 +1,58 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import shutil
import tempfile

from oslo_utils import uuidutils

from tacker.tests.functional.sol_v2 import paramgen
from tacker.tests.functional.sol_v2 import utils


zip_file_name = os.path.basename(os.path.abspath(".")) + '.zip'
tmp_dir = tempfile.mkdtemp()
vnfd_id = uuidutils.generate_uuid()

# tacker/tests/etc...
#             /functional/sol_v2/samples/sampleX
image_dir = "../../../../etc/samples/etsi/nfv/common/Files/images/"
image_file = "cirros-0.5.2-x86_64-disk.img"
image_path = os.path.abspath(image_dir + image_file)

utils.make_zip(".", tmp_dir, vnfd_id, image_path)

shutil.move(os.path.join(tmp_dir, zip_file_name), ".")
shutil.rmtree(tmp_dir)

create_req = paramgen.sample1_create(vnfd_id)
terminate_req = paramgen.sample1_terminate()

net_ids = utils.get_network_ids(['net0', 'net1', 'net_mgmt'])
subnet_ids = utils.get_subnet_ids(['subnet0', 'subnet1'])

instantiate_req = paramgen.sample1_instantiate(
    net_ids, subnet_ids, "http://localhost/identity/v3")

with open("create_req", "w") as f:
    f.write(json.dumps(create_req, indent=2))

with open("terminate_req", "w") as f:
    f.write(json.dumps(terminate_req, indent=2))

with open("instantiate_req", "w") as f:
    f.write(json.dumps(instantiate_req, indent=2))
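The script above writes a CSAR zip plus three request bodies (create_req, instantiate_req, terminate_req) for manually exercising the new v2 endpoints; the zip itself is onboarded through the VNF package API, which the functional test further below does via create_vnf_package(). A rough sketch of driving the generated request files by hand is given here. The endpoint paths follow ETSI SOL003 under the /vnflcm/v2 prefix introduced by this patch, but the base URL, port, token handling, and the exact version-header convention are assumptions, not part of this change.

# Hypothetical driver for the files written by pkggen.py (sketch only).
import json
import requests

BASE = "http://127.0.0.1:9890"            # assumed local Tacker endpoint
HEADERS = {"Version": "2.0.0",            # v2 API version header (assumed name)
           "X-Auth-Token": "<token>",     # obtain from Keystone separately
           "Content-Type": "application/json"}

with open("create_req") as f:
    create_req = json.load(f)

# POST /vnflcm/v2/vnf_instances creates the (not yet instantiated) VNF.
resp = requests.post(BASE + "/vnflcm/v2/vnf_instances",
                     headers=HEADERS, data=json.dumps(create_req))
inst_id = resp.json()["id"]

with open("instantiate_req") as f:
    instantiate_req = json.load(f)

# POST .../instantiate starts an asynchronous LCM operation; its progress is
# tracked through the vnf_lcm_op_occs resource referenced by the Location
# header, exactly as the functional test below does.
requests.post(BASE + "/vnflcm/v2/vnf_instances/" + inst_id + "/instantiate",
              headers=HEADERS, data=json.dumps(instantiate_req))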
@ -0,0 +1,89 @@
|
||||
heat_template_version: 2013-05-23
|
||||
description: 'VDU1 HOT for Sample VNF'
|
||||
|
||||
parameters:
|
||||
flavor:
|
||||
type: string
|
||||
image:
|
||||
type: string
|
||||
net1:
|
||||
type: string
|
||||
net2:
|
||||
type: string
|
||||
net3:
|
||||
type: string
|
||||
net4:
|
||||
type: string
|
||||
net5:
|
||||
type: string
|
||||
subnet:
|
||||
type: string
|
||||
# uncomment when BUG "https://storyboard.openstack.org/#!/story/2009164" fixed
|
||||
# affinity:
|
||||
# type: string
|
||||
|
||||
resources:
|
||||
VDU1:
|
||||
type: OS::Nova::Server
|
||||
properties:
|
||||
flavor: { get_param: flavor }
|
||||
name: VDU1
|
||||
block_device_mapping_v2: [{"volume_id": { get_resource: VirtualStorage }}]
|
||||
networks:
|
||||
- port:
|
||||
get_resource: VDU1_CP1
|
||||
- port:
|
||||
get_resource: VDU1_CP2
|
||||
# replace the following line with the Port ID when extmanagedVLs' ports are specified in the instantiatevnfrequest
|
||||
- port:
|
||||
get_resource: VDU1_CP3
|
||||
- port:
|
||||
get_resource: VDU1_CP4
|
||||
- port:
|
||||
get_resource: VDU1_CP5
|
||||
|
||||
# uncomment when BUG "https://storyboard.openstack.org/#!/story/2009164" fixed
|
||||
# scheduler_hints:
|
||||
# group: {get_param: affinity }
|
||||
|
||||
VirtualStorage:
|
||||
type: OS::Cinder::Volume
|
||||
properties:
|
||||
image: { get_param: image }
|
||||
size: 1
|
||||
volume_type: { get_resource: multi }
|
||||
multi:
|
||||
type: OS::Cinder::VolumeType
|
||||
properties:
|
||||
name: { get_resource: VDU1_CP1 }
|
||||
metadata: { multiattach: "<is> True" }
|
||||
|
||||
# extVL without FixedIP or with numDynamicAddresses
|
||||
VDU1_CP1:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net1 }
|
||||
|
||||
# extVL with numDynamicAddresses and subnet
|
||||
VDU1_CP2:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net2 }
|
||||
fixed_ips:
|
||||
- subnet: { get_param: subnet}
|
||||
|
||||
# CPs of internal VLs are deleted when extmanagedVLs and ports are specified in the instantiatevnfrequest
|
||||
VDU1_CP3:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net3 }
|
||||
|
||||
VDU1_CP4:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net4 }
|
||||
|
||||
VDU1_CP5:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: net5 }
|
@ -0,0 +1,136 @@
|
||||
heat_template_version: 2013-05-23
|
||||
description: 'Simple Base HOT for Sample VNF'
|
||||
|
||||
parameters:
|
||||
nfv:
|
||||
type: json
|
||||
|
||||
resources:
|
||||
VDU1_scale_group:
|
||||
type: OS::Heat::AutoScalingGroup
|
||||
properties:
|
||||
min_size: 1
|
||||
max_size: 3
|
||||
desired_capacity: 1
|
||||
resource:
|
||||
type: VDU1.yaml
|
||||
properties:
|
||||
flavor: { get_param: [ nfv, VDU, VDU1, computeFlavourId ] }
|
||||
image: { get_param: [ nfv, VDU, VirtualStorage, vcImageId ] }
|
||||
net1: { get_param: [ nfv, CP, VDU1_CP1, network ] }
|
||||
net2: { get_param: [ nfv, CP, VDU1_CP2, network ] }
|
||||
subnet: { get_param: [nfv, CP, VDU1_CP2, fixed_ips, 0, subnet ]}
|
||||
net3: { get_resource: internalVL1 }
|
||||
net4: { get_resource: internalVL2 }
|
||||
net5: { get_resource: internalVL3 }
|
||||
# uncomment when BUG "https://storyboard.openstack.org/#!/story/2009164" fixed
|
||||
# affinity: { get_resource: nfvi_node_affinity }
|
||||
VDU1_scale_out:
|
||||
type: OS::Heat::ScalingPolicy
|
||||
properties:
|
||||
scaling_adjustment: 1
|
||||
auto_scaling_group_id:
|
||||
get_resource: VDU1_scale_group
|
||||
adjustment_type: change_in_capacity
|
||||
VDU1_scale_in:
|
||||
type: OS::Heat::ScalingPolicy
|
||||
properties:
|
||||
scaling_adjustment: -1
|
||||
auto_scaling_group_id:
|
||||
get_resource: VDU1_scale_group
|
||||
adjustment_type: change_in_capacity
|
||||
|
||||
VDU2:
|
||||
type: OS::Nova::Server
|
||||
properties:
|
||||
flavor: { get_param: [ nfv, VDU, VDU2, computeFlavourId ] }
|
||||
image: { get_param: [ nfv, VDU, VDU2, vcImageId] }
|
||||
networks:
|
||||
- port:
|
||||
get_resource: VDU2_CP1
|
||||
- port:
|
||||
get_resource: VDU2_CP2
|
||||
- port:
|
||||
get_resource: VDU2_CP3
|
||||
- port:
|
||||
get_resource: VDU2_CP4
|
||||
- port:
|
||||
get_resource: VDU2_CP5
|
||||
# uncomment when BUG "https://storyboard.openstack.org/#!/story/2009164" fixed
|
||||
# scheduler_hints:
|
||||
# group: {get_resource: nfvi_node_affinity }
|
||||
|
||||
# extVL with FixedIP
|
||||
VDU2_CP1:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: [ nfv, CP, VDU2_CP1, network ] }
|
||||
fixed_ips:
|
||||
- ip_address: { get_param: [nfv, CP, VDU2_CP1, fixed_ips, 0, ip_address]}
|
||||
|
||||
# extVL with FixedIP and Subnet
|
||||
VDU2_CP2:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
network: { get_param: [ nfv, CP, VDU2_CP2, network ] }
|
||||
fixed_ips:
|
||||
- ip_address: { get_param: [nfv, CP, VDU2_CP2, fixed_ips, 0, ip_address]}
|
||||
subnet: { get_param: [nfv, CP, VDU2_CP2, fixed_ips, 0, subnet]}
|
||||
|
||||
VDU2_CP3:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in the instantiatevnfrequest
|
||||
network: { get_resource: internalVL1 }
|
||||
|
||||
VDU2_CP4:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in the instantiatevnfrequest
|
||||
network: { get_resource: internalVL2 }
|
||||
|
||||
VDU2_CP5:
|
||||
type: OS::Neutron::Port
|
||||
properties:
|
||||
# replace the following line with the VL's ID when extmanagedVLs are specified in the instantiatevnfrequest
|
||||
network: { get_resource: internalVL3 }
|
||||
|
||||
# delete the following lines when extmanagedVLs are specified in instantiatevnfrequest
|
||||
internalVL1:
|
||||
type: OS::Neutron::Net
|
||||
internalVL2:
|
||||
type: OS::Neutron::Net
|
||||
internalVL3:
|
||||
type: OS::Neutron::Net
|
||||
|
||||
|
||||
internalVL1_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL1
|
||||
cidr: 192.168.3.0/24
|
||||
internalVL2_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL2
|
||||
cidr: 192.168.4.0/24
|
||||
internalVL3_subnet:
|
||||
type: OS::Neutron::Subnet
|
||||
properties:
|
||||
ip_version: 4
|
||||
network:
|
||||
get_resource: internalVL3
|
||||
cidr: 192.168.5.0/24
|
||||
|
||||
# uncomment when BUG "https://storyboard.openstack.org/#!/story/2009164" fixed
|
||||
# nfvi_node_affinity:
|
||||
# type: OS::Nova::ServerGroup
|
||||
# properties:
|
||||
# name: nfvi_node_affinity
|
||||
# policies: [ 'affinity' ]
|
||||
|
||||
outputs: {}
|
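Every VDU and CP property in the base HOT above is pulled out of the single "nfv" JSON parameter via get_param: [ nfv, VDU, ... ] and [ nfv, CP, ... ]. The DefaultUserData module later in this change fills that structure in from the instantiate request and the grant. An illustrative (not normative) value for this particular template would look roughly like the sketch below; every ID is a placeholder.

# Illustrative shape of the "nfv" parameter consumed by the base HOT above.
# Keys mirror the get_param references in the template; all values are
# placeholders, not data taken from this patch.
nfv = {
    "VDU": {
        "VDU1": {"computeFlavourId": "<flavor id or name>"},
        "VirtualStorage": {"vcImageId": "<glance image id>"},
        "VDU2": {"computeFlavourId": "<flavor id or name>",
                 "vcImageId": "<glance image id>"},
    },
    "CP": {
        "VDU1_CP1": {"network": "<network id>"},
        "VDU1_CP2": {"network": "<network id>",
                     "fixed_ips": [{"subnet": "<subnet id>"}]},
        "VDU2_CP1": {"network": "<network id>",
                     "fixed_ips": [{"ip_address": "<fixed ip>"}]},
        "VDU2_CP2": {"network": "<network id>",
                     "fixed_ips": [{"ip_address": "<fixed ip>",
                                    "subnet": "<subnet id>"}]},
    },
}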
@ -0,0 +1,202 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
description: ETSI NFV SOL 001 common types definitions version 2.6.1
|
||||
metadata:
|
||||
template_name: etsi_nfv_sol001_common_types
|
||||
template_author: ETSI_NFV
|
||||
template_version: 2.6.1
|
||||
|
||||
data_types:
|
||||
tosca.datatypes.nfv.L2AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes the information on the MAC addresses to be assigned to a connection point.
|
||||
properties:
|
||||
mac_address_assignment:
|
||||
type: boolean
|
||||
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
|
||||
required: true
|
||||
|
||||
tosca.datatypes.nfv.L3AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
|
||||
properties:
|
||||
ip_address_assignment:
|
||||
type: boolean
|
||||
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
|
||||
required: true
|
||||
floating_ip_activated:
|
||||
type: boolean
|
||||
description: Specifies if the floating IP scheme is activated on the Connection Point or not
|
||||
required: true
|
||||
ip_address_type:
|
||||
type: string
|
||||
description: Defines address type. The address type should be aligned with the address type supported by the layer_protocols properties of the parent VnfExtCp
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ ipv4, ipv6 ]
|
||||
number_of_ip_address:
|
||||
type: integer
|
||||
description: Minimum number of IP addresses to be assigned
|
||||
required: false
|
||||
constraints:
|
||||
- greater_than: 0
|
||||
|
||||
tosca.datatypes.nfv.AddressData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes information about the addressing scheme and parameters applicable to a CP
|
||||
properties:
|
||||
address_type:
|
||||
type: string
|
||||
description: Describes the type of the address to be assigned to a connection point. The content type shall be aligned with the address type supported by the layerProtocol property of the connection point
|
||||
required: true
|
||||
constraints:
|
||||
- valid_values: [ mac_address, ip_address ]
|
||||
l2_address_data:
|
||||
type: tosca.datatypes.nfv.L2AddressData
|
||||
description: Provides the information on the MAC addresses to be assigned to a connection point.
|
||||
required: false
|
||||
l3_address_data:
|
||||
type: tosca.datatypes.nfv.L3AddressData
|
||||
description: Provides the information on the IP addresses to be assigned to a connection point
|
||||
required: false
|
||||
|
||||
tosca.datatypes.nfv.ConnectivityType:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes additional connectivity information of a virtualLink
|
||||
properties:
|
||||
layer_protocols:
|
||||
type: list
|
||||
description: Identifies the protocol a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire).The top layer protocol of the virtualLink protocol stack shall always be provided. The lower layer protocols may be included when there are specific requirements on these layers.
|
||||
required: true
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
flow_pattern:
|
||||
type: string
|
||||
description: Identifies the flow pattern of the connectivity
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ line, tree, mesh ]
|
||||
|
||||
tosca.datatypes.nfv.LinkBitrateRequirements:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes the requirements in terms of bitrate for a virtual link
|
||||
properties:
|
||||
root:
|
||||
type: integer # in bits per second
|
||||
description: Specifies the throughput requirement in bits per second of the link (e.g. bitrate of E-Line, root bitrate of E-Tree, aggregate capacity of E-LAN).
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
leaf:
|
||||
type: integer # in bits per second
|
||||
description: Specifies the throughput requirement in bits per second of leaf connections to the link when applicable to the connectivity type (e.g. for E-Tree and E LAN branches).
|
||||
required: false
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
|
||||
tosca.datatypes.nfv.CpProtocolData:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information
|
||||
properties:
|
||||
associated_layer_protocol:
|
||||
type: string
|
||||
required: true
|
||||
description: One of the values of the property layer_protocols of the CP
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
address_data:
|
||||
type: list
|
||||
description: Provides information on the addresses to be assigned to the CP
|
||||
entry_schema:
|
||||
type: tosca.datatypes.nfv.AddressData
|
||||
required: false
|
||||
|
||||
tosca.datatypes.nfv.VnfProfile:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF.
|
||||
properties:
|
||||
instantiation_level:
|
||||
type: string
|
||||
description: Identifier of the instantiation level of the VNF DF to be used for instantiation. If not present, the default instantiation level as declared in the VNFD shall be used.
|
||||
required: false
|
||||
min_number_of_instances:
|
||||
type: integer
|
||||
description: Minimum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
max_number_of_instances:
|
||||
type: integer
|
||||
description: Maximum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0
|
||||
|
||||
tosca.datatypes.nfv.Qos:
|
||||
derived_from: tosca.datatypes.Root
|
||||
description: describes QoS data for a given VL used in a VNF deployment flavour
|
||||
properties:
|
||||
latency:
|
||||
type: scalar-unit.time #Number
|
||||
description: Specifies the maximum latency
|
||||
required: true
|
||||
constraints:
|
||||
- greater_than: 0 s
|
||||
packet_delay_variation:
|
||||
type: scalar-unit.time #Number
|
||||
description: Specifies the maximum jitter
|
||||
required: true
|
||||
constraints:
|
||||
- greater_or_equal: 0 s
|
||||
packet_loss_ratio:
|
||||
type: float
|
||||
description: Specifies the maximum packet loss ratio
|
||||
required: false
|
||||
constraints:
|
||||
- in_range: [ 0.0, 1.0 ]
|
||||
|
||||
capability_types:
|
||||
tosca.capabilities.nfv.VirtualLinkable:
|
||||
derived_from: tosca.capabilities.Node
|
||||
description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type
|
||||
|
||||
relationship_types:
|
||||
tosca.relationships.nfv.VirtualLinksTo:
|
||||
derived_from: tosca.relationships.DependsOn
|
||||
description: Represents an association relationship between the VduCp and VnfVirtualLink node types
|
||||
valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
|
||||
|
||||
node_types:
|
||||
tosca.nodes.nfv.Cp:
|
||||
derived_from: tosca.nodes.Root
|
||||
description: Provides information regarding the purpose of the connection point
|
||||
properties:
|
||||
layer_protocols:
|
||||
type: list
|
||||
description: Identifies which protocol the connection point uses for connectivity purposes
|
||||
required: true
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints:
|
||||
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
|
||||
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
|
||||
type: string
|
||||
description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
|
||||
required: false
|
||||
constraints:
|
||||
- valid_values: [ root, leaf ]
|
||||
description:
|
||||
type: string
|
||||
description: Provides human-readable information on the purpose of the connection point
|
||||
required: false
|
||||
protocol:
|
||||
type: list
|
||||
description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
|
||||
required: false
|
||||
entry_schema:
|
||||
type: tosca.datatypes.nfv.CpProtocolData
|
||||
trunk_mode:
|
||||
type: boolean
|
||||
description: Provides information about whether the CP instantiated from this Cp is in Trunk mode (802.1Q or other), When operating in "trunk mode", the Cp is capable of carrying traffic for several VLANs. Absence of this property implies that trunkMode is not configured for the Cp i.e. It is equivalent to boolean value "false".
|
||||
required: false
|
File diff suppressed because it is too large
@ -0,0 +1,406 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: Simple deployment flavour for Sample VNF
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
- v2_sample2_types.yaml
|
||||
|
||||
topology_template:
|
||||
inputs:
|
||||
descriptor_id:
|
||||
type: string
|
||||
descriptor_version:
|
||||
type: string
|
||||
provider:
|
||||
type: string
|
||||
product_name:
|
||||
type: string
|
||||
software_version:
|
||||
type: string
|
||||
vnfm_info:
|
||||
type: list
|
||||
entry_schema:
|
||||
type: string
|
||||
flavour_id:
|
||||
type: string
|
||||
flavour_description:
|
||||
type: string
|
||||
|
||||
substitution_mappings:
|
||||
node_type: company.provider.VNF
|
||||
properties:
|
||||
flavour_id: simple
|
||||
requirements:
|
||||
virtual_link_external1_1: [ VDU1_CP1, virtual_link ]
|
||||
virtual_link_external1_2: [ VDU2_CP1, virtual_link ]
|
||||
virtual_link_external2_1: [ VDU1_CP2, virtual_link ]
|
||||
virtual_link_external2_2: [ VDU2_CP2, virtual_link ]
|
||||
|
||||
node_templates:
|
||||
VNF:
|
||||
type: company.provider.VNF
|
||||
properties:
|
||||
flavour_description: A simple flavour
|
||||
interfaces:
|
||||
Vnflcm:
|
||||
instantiate_start:
|
||||
implementation: sample-script
|
||||
instantiate_end:
|
||||
implementation: sample-script
|
||||
terminate_start:
|
||||
implementation: sample-script
|
||||
terminate_end:
|
||||
implementation: sample-script
|
||||
artifacts:
|
||||
sample-script:
|
||||
description: Sample script
|
||||
type: tosca.artifacts.Implementation.Python
|
||||
file: ../Scripts/sample_script.py
|
||||
|
||||
VDU1:
|
||||
type: tosca.nodes.nfv.Vdu.Compute
|
||||
properties:
|
||||
name: VDU1
|
||||
description: VDU1 compute node
|
||||
vdu_profile:
|
||||
min_number_of_instances: 1
|
||||
max_number_of_instances: 3
|
||||
capabilities:
|
||||
virtual_compute:
|
||||
properties:
|
||||
requested_additional_capabilities:
|
||||
properties:
|
||||
requested_additional_capability_name: m1.tiny
|
||||
support_mandatory: true
|
||||
target_performance_parameters:
|
||||
entry_schema: test
|
||||
virtual_memory:
|
||||
virtual_mem_size: 512 MB
|
||||
virtual_cpu:
|
||||
num_virtual_cpu: 1
|
||||
virtual_local_storage:
|
||||
- size_of_storage: 3 GB
|
||||
requirements:
|
||||
- virtual_storage: VirtualStorage
|
||||
|
||||
VDU2:
|
||||
type: tosca.nodes.nfv.Vdu.Compute
|
||||
properties:
|
||||
name: VDU2
|
||||
description: VDU2 compute node
|
||||
vdu_profile:
|
||||
min_number_of_instances: 1
|
||||
max_number_of_instances: 1
|
||||
sw_image_data:
|
||||
name: cirros-0.5.2-x86_64-disk
|
||||
version: '0.5.2'
|
||||
checksum:
|
||||
algorithm: sha-256
|
||||
hash: 932fcae93574e242dc3d772d5235061747dfe537668443a1f0567d893614b464
|
||||
container_format: bare
|
||||
disk_format: qcow2
|
||||
min_disk: 0 GB
|
||||
min_ram: 256 MB
|
||||
size: 12 GB
|
||||
capabilities:
|
||||
virtual_compute:
|
||||
properties:
|
||||
requested_additional_capabilities:
|
||||
properties:
|
||||
requested_additional_capability_name: m1.tiny
|
||||
support_mandatory: true
|
||||
target_performance_parameters:
|
||||
entry_schema: test
|
||||
virtual_memory:
|
||||
virtual_mem_size: 512 MB
|
||||
virtual_cpu:
|
||||
num_virtual_cpu: 1
|
||||
virtual_local_storage:
|
||||
- size_of_storage: 3 GB
|
||||
|
||||
VirtualStorage:
|
||||
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
|
||||
properties:
|
||||
virtual_block_storage_data:
|
||||
size_of_storage: 1 GB
|
||||
rdma_enabled: true
|
||||
sw_image_data:
|
||||
name: cirros-0.5.2-x86_64-disk
|
||||
version: '0.5.2'
|
||||
checksum:
|
||||
algorithm: sha-256
|
||||
hash: 932fcae93574e242dc3d772d5235061747dfe537668443a1f0567d893614b464
|
||||
container_format: bare
|
||||
disk_format: qcow2
|
||||
min_disk: 0 GB
|
||||
min_ram: 256 MB
|
||||
size: 12 GB
|
||||
|
||||
VDU1_CP1:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 0
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
|
||||
VDU1_CP2:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 1
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
|
||||
VDU1_CP3:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 2
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL1
|
||||
|
||||
VDU1_CP4:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 3
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL2
|
||||
|
||||
VDU1_CP5:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 4
|
||||
requirements:
|
||||
- virtual_binding: VDU1
|
||||
- virtual_link: internalVL3
|
||||
|
||||
VDU2_CP1:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 0
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
|
||||
VDU2_CP2:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 1
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
|
||||
VDU2_CP3:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 2
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL1
|
||||
|
||||
VDU2_CP4:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 3
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL2
|
||||
|
||||
VDU2_CP5:
|
||||
type: tosca.nodes.nfv.VduCp
|
||||
properties:
|
||||
layer_protocols: [ ipv4 ]
|
||||
order: 4
|
||||
requirements:
|
||||
- virtual_binding: VDU2
|
||||
- virtual_link: internalVL3
|
||||
|
||||
internalVL1:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: External Managed Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.3.0/24
|
||||
|
||||
internalVL2:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: External Managed Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.4.0/24
|
||||
|
||||
internalVL3:
|
||||
type: tosca.nodes.nfv.VnfVirtualLink
|
||||
properties:
|
||||
connectivity_type:
|
||||
layer_protocols: [ ipv4 ]
|
||||
description: Internal Virtual link in the VNF
|
||||
vl_profile:
|
||||
max_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
min_bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
virtual_link_protocol_data:
|
||||
- associated_layer_protocol: ipv4
|
||||
l3_protocol_data:
|
||||
ip_version: ipv4
|
||||
cidr: 192.168.5.0/24
|
||||
|
||||
groups:
|
||||
affinityOrAntiAffinityGroup1:
|
||||
type: tosca.groups.nfv.PlacementGroup
|
||||
members: [ VDU1, VDU2 ]
|
||||
|
||||
policies:
|
||||
- scaling_aspects:
|
||||
type: tosca.policies.nfv.ScalingAspects
|
||||
properties:
|
||||
aspects:
|
||||
VDU1_scale:
|
||||
name: VDU1_scale
|
||||
description: VDU1 scaling aspect
|
||||
max_scale_level: 2
|
||||
step_deltas:
|
||||
- delta_1
|
||||
|
||||
- VDU1_initial_delta:
|
||||
type: tosca.policies.nfv.VduInitialDelta
|
||||
properties:
|
||||
initial_delta:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- VDU2_initial_delta:
|
||||
type: tosca.policies.nfv.VduInitialDelta
|
||||
properties:
|
||||
initial_delta:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU2 ]
|
||||
|
||||
- VDU1_scaling_aspect_deltas:
|
||||
type: tosca.policies.nfv.VduScalingAspectDeltas
|
||||
properties:
|
||||
aspect: VDU1_scale
|
||||
deltas:
|
||||
delta_1:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- instantiation_levels:
|
||||
type: tosca.policies.nfv.InstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
description: Smallest size
|
||||
scale_info:
|
||||
VDU1_scale:
|
||||
scale_level: 0
|
||||
instantiation_level_2:
|
||||
description: Largest size
|
||||
scale_info:
|
||||
VDU1_scale:
|
||||
scale_level: 2
|
||||
default_level: instantiation_level_1
|
||||
|
||||
- VDU1_instantiation_levels:
|
||||
type: tosca.policies.nfv.VduInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
number_of_instances: 1
|
||||
instantiation_level_2:
|
||||
number_of_instances: 3
|
||||
targets: [ VDU1 ]
|
||||
|
||||
- VDU2_instantiation_levels:
|
||||
type: tosca.policies.nfv.VduInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
number_of_instances: 1
|
||||
instantiation_level_2:
|
||||
number_of_instances: 1
|
||||
targets: [ VDU2 ]
|
||||
|
||||
- internalVL1_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL1 ]
|
||||
|
||||
- internalVL2_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL2 ]
|
||||
|
||||
- internalVL3_instantiation_levels:
|
||||
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
|
||||
properties:
|
||||
levels:
|
||||
instantiation_level_1:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
instantiation_level_2:
|
||||
bitrate_requirements:
|
||||
root: 1048576
|
||||
leaf: 1048576
|
||||
targets: [ internalVL3 ]
|
||||
|
||||
- policy_antiaffinity_group:
|
||||
type: tosca.policies.nfv.AntiAffinityRule
|
||||
targets: [ affinityOrAntiAffinityGroup1 ]
|
||||
properties:
|
||||
scope: nfvi_node
|
@ -0,0 +1,31 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: Sample VNF
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
- v2_sample2_types.yaml
|
||||
- v2_sample2_df_simple.yaml
|
||||
|
||||
topology_template:
|
||||
inputs:
|
||||
selected_flavour:
|
||||
type: string
|
||||
description: VNF deployment flavour selected by the consumer. It is provided in the API
|
||||
|
||||
node_templates:
|
||||
VNF:
|
||||
type: company.provider.VNF
|
||||
properties:
|
||||
flavour_id: { get_input: selected_flavour }
|
||||
descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d7000000
|
||||
provider: Company
|
||||
product_name: Sample VNF
|
||||
software_version: '1.0'
|
||||
descriptor_version: '1.0'
|
||||
vnfm_info:
|
||||
- Tacker
|
||||
requirements:
|
||||
#- virtual_link_external # mapped in lower-level templates
|
||||
#- virtual_link_internal # mapped in lower-level templates
|
@ -0,0 +1,55 @@
|
||||
tosca_definitions_version: tosca_simple_yaml_1_2
|
||||
|
||||
description: VNF type definition
|
||||
|
||||
imports:
|
||||
- etsi_nfv_sol001_common_types.yaml
|
||||
- etsi_nfv_sol001_vnfd_types.yaml
|
||||
|
||||
node_types:
|
||||
company.provider.VNF:
|
||||
derived_from: tosca.nodes.nfv.VNF
|
||||
properties:
|
||||
descriptor_id:
|
||||
type: string
|
||||
constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d7000000 ] ]
|
||||
default: b1bb0ce7-ebca-4fa7-95ed-4840d7000000
|
||||
descriptor_version:
|
||||
type: string
|
||||
constraints: [ valid_values: [ '1.0' ] ]
|
||||
default: '1.0'
|
||||
provider:
|
||||
type: string
|
||||
constraints: [ valid_values: [ 'Company' ] ]
|
||||
default: 'Company'
|
||||
product_name:
|
||||
type: string
|
||||
constraints: [ valid_values: [ 'Sample VNF' ] ]
|
||||
default: 'Sample VNF'
|
||||
software_version:
|
||||
type: string
|
||||
constraints: [ valid_values: [ '1.0' ] ]
|
||||
default: '1.0'
|
||||
vnfm_info:
|
||||
type: list
|
||||
entry_schema:
|
||||
type: string
|
||||
constraints: [ valid_values: [ Tacker ] ]
|
||||
default: [ Tacker ]
|
||||
flavour_id:
|
||||
type: string
|
||||
constraints: [ valid_values: [ simple ] ]
|
||||
default: simple
|
||||
flavour_description:
|
||||
type: string
|
||||
default: "flavour"
|
||||
requirements:
|
||||
- virtual_link_external1:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
- virtual_link_external2:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
- virtual_link_internal:
|
||||
capability: tosca.capabilities.nfv.VirtualLinkable
|
||||
interfaces:
|
||||
Vnflcm:
|
||||
type: tosca.interfaces.nfv.Vnflcm
|
@ -0,0 +1,67 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import pickle
import sys


class SampleScript(object):

    def __init__(self, req, inst, grant_req, grant, csar_dir):
        self.req = req
        self.inst = inst
        self.grant_req = grant_req
        self.grant = grant
        self.csar_dir = csar_dir

    def instantiate_start(self):
        pass

    def instantiate_end(self):
        pass

    def terminate_start(self):
        pass

    def terminate_end(self):
        pass


def main():
    script_dict = pickle.load(sys.stdin.buffer)

    operation = script_dict['operation']
    req = script_dict['request']
    inst = script_dict['vnf_instance']
    grant_req = script_dict['grant_request']
    grant = script_dict['grant_response']
    csar_dir = script_dict['tmp_csar_dir']

    script = SampleScript(req, inst, grant_req, grant, csar_dir)
    try:
        getattr(script, operation)()
    except AttributeError:
        raise Exception("{} is not included in the script.".format(operation))


if __name__ == "__main__":
    try:
        main()
        os._exit(0)
    except Exception as ex:
        sys.stderr.write(str(ex))
        sys.stderr.flush()
        os._exit(1)
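main() expects a pickled dict on stdin carrying the keys read above (operation, request, vnf_instance, grant_request, grant_response, tmp_csar_dir) and reports failures on stderr with a non-zero exit code. How the VNFM process actually launches the script is not visible in this hunk, so the snippet below is only a guess at the calling convention, mainly useful for exercising such a script by hand.

# Hypothetical manual invocation of Scripts/sample_script.py (sketch only;
# the real launch mechanism inside Tacker may differ).
import pickle
import subprocess
import sys

script_dict = {
    'operation': 'instantiate_end',   # one of the methods defined above
    'request': {},                    # InstantiateVnfRequest body
    'vnf_instance': {},               # VnfInstance representation
    'grant_request': {},
    'grant_response': {},
    'tmp_csar_dir': '/tmp/csar',      # placeholder path
}

proc = subprocess.run(
    [sys.executable, 'Scripts/sample_script.py'],
    input=pickle.dumps(script_dict), capture_output=True)
if proc.returncode != 0:
    print(proc.stderr.decode())       # the script writes its error to stderr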
@ -0,0 +1,4 @@
|
||||
TOSCA-Meta-File-Version: 1.0
|
||||
CSAR-Version: 1.1
|
||||
Created-by: Onboarding portal
|
||||
Entry-Definitions: Definitions/v2_sample2_top.vnfd.yaml
|
@ -0,0 +1,86 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import yaml

from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.infra_drivers.openstack import userdata_utils


class DefaultUserData(userdata_utils.AbstractUserData):

    @staticmethod
    def instantiate(req, inst, grant_req, grant, tmp_csar_dir):
        vnfd = userdata_utils.get_vnfd(inst['vnfdId'], tmp_csar_dir)
        flavour_id = req['flavourId']

        hot_dict = vnfd.get_base_hot(flavour_id)
        top_hot = hot_dict['template']

        nfv_dict = userdata_utils.init_nfv_dict(top_hot)

        vdus = nfv_dict.get('VDU', {})
        for vdu_name, vdu_value in vdus.items():
            if 'computeFlavourId' in vdu_value:
                vdu_value['computeFlavourId'] = (
                    userdata_utils.get_param_flavor(
                        vdu_name, req, vnfd, grant))
            if 'vcImageId' in vdu_value:
                vdu_value['vcImageId'] = userdata_utils.get_param_image(
                    vdu_name, req, vnfd, grant)
            if 'locationConstraints' in vdu_value:
                vdu_value['locationConstraints'] = (
                    userdata_utils.get_param_zone(
                        vdu_name, grant_req, grant))

        cps = nfv_dict.get('CP', {})
        for cp_name, cp_value in cps.items():
            if 'network' in cp_value:
                cp_value['network'] = userdata_utils.get_param_network(
                    cp_name, grant, req)
            if 'fixed_ips' in cp_value:
                ext_fixed_ips = userdata_utils.get_param_fixed_ips(
                    cp_name, grant, req)
                fixed_ips = []
                for i in range(len(ext_fixed_ips)):
                    if i not in cp_value['fixed_ips']:
                        break
                    ips_i = cp_value['fixed_ips'][i]
                    if 'subnet' in ips_i:
                        ips_i['subnet'] = ext_fixed_ips[i].get('subnet')
                    if 'ip_address' in ips_i:
                        ips_i['ip_address'] = ext_fixed_ips[i].get(
                            'ip_address')
                    fixed_ips.append(ips_i)
                cp_value['fixed_ips'] = fixed_ips

        userdata_utils.apply_ext_managed_vls(top_hot, req, grant)

        if 'nfv' in req.get('additionalParams', {}):
            nfv_dict = inst_utils.json_merge_patch(nfv_dict,
                req['additionalParams']['nfv'])
        if 'nfv' in grant.get('additionalParams', {}):
            nfv_dict = inst_utils.json_merge_patch(nfv_dict,
                grant['additionalParams']['nfv'])

        fields = {
            'template': yaml.safe_dump(top_hot),
            'parameters': {'nfv': nfv_dict},
            'files': {}
        }
        for key, value in hot_dict.get('files', {}).items():
            fields['files'][key] = yaml.safe_dump(value)

        return fields
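The two json_merge_patch calls let a deployer override anything the driver computed: values supplied under additionalParams['nfv'] in the instantiate request are merged over nfv_dict, and values from the grant are merged afterwards, so the grant takes precedence. Assuming the helper behaves like RFC 7396 JSON Merge Patch (the name suggests it, but this is an assumption), the effect is roughly the following; the merge function here is only a behavioural sketch, not the actual inst_utils helper.

# Rough illustration of the override step above (assumes RFC 7396 semantics;
# the real helper is inst_utils.json_merge_patch, not this function).
def merge_patch(target, patch):
    if not isinstance(patch, dict):
        return patch
    result = dict(target) if isinstance(target, dict) else {}
    for key, value in patch.items():
        if value is None:
            result.pop(key, None)      # None removes a key in merge patch
        else:
            result[key] = merge_patch(result.get(key, {}), value)
    return result

nfv_dict = {"VDU": {"VDU1": {"computeFlavourId": "m1.tiny"}}}
override = {"VDU": {"VDU1": {"computeFlavourId": "m1.small"}}}   # example only
print(merge_patch(nfv_dict, override))
# {'VDU': {'VDU1': {'computeFlavourId': 'm1.small'}}}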
51
tacker/tests/functional/sol_v2/samples/sample2/pkggen.py
Normal file
@ -0,0 +1,51 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import shutil
import tempfile

from oslo_utils import uuidutils

from tacker.tests.functional.sol_v2 import paramgen
from tacker.tests.functional.sol_v2 import utils


zip_file_name = os.path.basename(os.path.abspath(".")) + '.zip'
tmp_dir = tempfile.mkdtemp()
vnfd_id = uuidutils.generate_uuid()
utils.make_zip(".", tmp_dir, vnfd_id)

shutil.copy(os.path.join(tmp_dir, zip_file_name), ".")
shutil.rmtree(tmp_dir)

create_req = paramgen.sample2_create(vnfd_id)
terminate_req = paramgen.sample2_terminate()

net_ids = utils.get_network_ids(['net0', 'net1', 'net_mgmt'])
subnet_ids = utils.get_subnet_ids(['subnet0', 'subnet1'])

instantiate_req = paramgen.sample2_instantiate(
    net_ids, subnet_ids, "http://localhost/identity/v3")

with open("create_req", "w") as f:
    f.write(json.dumps(create_req, indent=2))

with open("terminate_req", "w") as f:
    f.write(json.dumps(terminate_req, indent=2))

with open("instantiate_req", "w") as f:
    f.write(json.dumps(instantiate_req, indent=2))
147
tacker/tests/functional/sol_v2/test_vnflcm_basic.py
Normal file
@ -0,0 +1,147 @@
|
||||
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
from tacker.tests.functional.sol_v2 import base_v2
|
||||
from tacker.tests.functional.sol_v2 import paramgen
|
||||
|
||||
|
||||
class VnfLcmTest(base_v2.BaseSolV2Test):
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
super(VnfLcmTest, cls).setUpClass()
|
||||
|
||||
cur_dir = os.path.dirname(__file__)
|
||||
# tacker/tests/etc...
|
||||
# /functional/sol_v2
|
||||
image_dir = os.path.join(
|
||||
cur_dir, "../../etc/samples/etsi/nfv/common/Files/images")
|
||||
image_file = "cirros-0.5.2-x86_64-disk.img"
|
||||
image_path = os.path.abspath(os.path.join(image_dir, image_file))
|
||||
|
||||
sample1_path = os.path.join(cur_dir, "samples/sample1")
|
||||
cls.vnf_pkg_1, cls.vnfd_id_1 = cls.create_vnf_package(
|
||||
sample1_path, image_path=image_path)
|
||||
|
||||
sample2_path = os.path.join(cur_dir, "samples/sample2")
|
||||
# no image contained
|
||||
cls.vnf_pkg_2, cls.vnfd_id_2 = cls.create_vnf_package(sample2_path)
|
||||
|
||||
@classmethod
|
||||
def tearDownClass(cls):
|
||||
super(VnfLcmTest, cls).tearDownClass()
|
||||
|
||||
cls.delete_vnf_package(cls.vnf_pkg_1)
|
||||
cls.delete_vnf_package(cls.vnf_pkg_2)
|
||||
|
||||
def setUp(self):
|
||||
super(VnfLcmTest, self).setUp()
|
||||
|
||||
def test_api_versions(self):
|
||||
path = "/vnflcm/api_versions"
|
||||
resp, body = self.tacker_client.do_request(
|
||||
path, "GET", version="2.0.0")
|
||||
self.assertEqual(200, resp.status_code)
|
||||
expected_body = {
|
||||
"uriPrefix": "/vnflcm",
|
||||
"apiVersions": [
|
||||
{'version': '1.3.0', 'isDeprecated': False},
|
||||
{'version': '2.0.0', 'isDeprecated': False}
|
||||
]
|
||||
}
|
||||
self.assertEqual(body, expected_body)
|
||||
|
||||
path = "/vnflcm/v2/api_versions"
|
||||
resp, body = self.tacker_client.do_request(
|
||||
path, "GET", version="2.0.0")
|
||||
self.assertEqual(200, resp.status_code)
|
||||
expected_body = {
|
||||
"uriPrefix": "/vnflcm/v2",
|
||||
"apiVersions": [
|
||||
{'version': '2.0.0', 'isDeprecated': False}
|
||||
]
|
||||
}
|
||||
self.assertEqual(body, expected_body)
|
||||
|
||||
def test_sample1(self):
|
||||
create_req = paramgen.sample1_create(self.vnfd_id_1)
|
||||
resp, body = self.create_vnf_instance(create_req)
|
||||
self.assertEqual(201, resp.status_code)
|
||||
inst_id = body['id']
|
||||
|
||||
net_ids = self.get_network_ids(['net0', 'net1', 'net_mgmt'])
|
||||
subnet_ids = self.get_subnet_ids(['subnet0', 'subnet1'])
|
||||
instantiate_req = paramgen.sample1_instantiate(
|
||||
net_ids, subnet_ids, self.auth_url)
|
||||
resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
|
||||
self.assertEqual(202, resp.status_code)
|
||||
|
||||
lcmocc_id = os.path.basename(resp.headers['Location'])
|
||||
self.wait_lcmocc_complete(lcmocc_id)
|
||||
|
||||
resp, body = self.show_vnf_instance(inst_id)
|
||||
self.assertEqual(200, resp.status_code)
|
||||
# TODO(oda-g): check body
|
||||
|
||||
terminate_req = paramgen.sample1_terminate()
|
||||
resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
|
||||
self.assertEqual(202, resp.status_code)
|
||||
|
||||
lcmocc_id = os.path.basename(resp.headers['Location'])
|
||||
self.wait_lcmocc_complete(lcmocc_id)
|
||||
|
||||
# wait a bit because there is a bit time lag between lcmocc DB
|
||||
# update and terminate completion.
|
||||
time.sleep(10)
|
||||
|
||||
resp, body = self.delete_vnf_instance(inst_id)
|
||||
self.assertEqual(204, resp.status_code)
|
||||
|
||||
def test_sample2(self):
|
||||
create_req = paramgen.sample2_create(self.vnfd_id_2)
|
||||
resp, body = self.create_vnf_instance(create_req)
|
||||
self.assertEqual(201, resp.status_code)
|
||||
inst_id = body['id']
|
||||
|
||||
net_ids = self.get_network_ids(['net0', 'net1', 'net_mgmt'])
|
||||
subnet_ids = self.get_subnet_ids(['subnet0', 'subnet1'])
|
||||
instantiate_req = paramgen.sample2_instantiate(
|
||||
net_ids, subnet_ids, self.auth_url)
|
||||
resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
|
||||
self.assertEqual(202, resp.status_code)
|
||||
|
||||
lcmocc_id = os.path.basename(resp.headers['Location'])
|
||||
self.wait_lcmocc_complete(lcmocc_id)
|
||||
|
||||
resp, body = self.show_vnf_instance(inst_id)
|
||||
self.assertEqual(200, resp.status_code)
|
||||
# TODO(oda-g): check body
|
||||
|
||||
terminate_req = paramgen.sample2_terminate()
|
||||
resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
|
||||
self.assertEqual(202, resp.status_code)
|
||||
|
||||
lcmocc_id = os.path.basename(resp.headers['Location'])
|
||||
self.wait_lcmocc_complete(lcmocc_id)
|
||||
|
||||
# wait a bit because there is a bit time lag between lcmocc DB
|
||||
# update and terminate completion.
|
||||
time.sleep(10)
|
||||
|
||||
resp, body = self.delete_vnf_instance(inst_id)
|
||||
self.assertEqual(204, resp.status_code)
|
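The tests above rely on wait_lcmocc_complete from base_v2.BaseSolV2Test, which is not part of this hunk. A minimal polling sketch of the kind of helper involved, assuming the SOL003-style operation occurrence endpoint and the COMPLETED/FAILED/ROLLED_BACK operation states (function name, path and states here are assumptions, not the actual base_v2 code):

# Hedged sketch only; 'client' is expected to behave like the test client's
# do_request() seen above, returning (resp, body).
import time


def wait_lcmocc_complete_sketch(client, lcmocc_id, timeout=600, interval=5):
    path = "/vnflcm/v2/vnf_lcm_op_occs/%s" % lcmocc_id
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp, body = client.do_request(path, "GET", version="2.0.0")
        state = body["operationState"]
        if state == "COMPLETED":
            return
        if state in ("FAILED", "ROLLED_BACK"):
            raise RuntimeError("operation %s ended in %s" % (lcmocc_id, state))
        time.sleep(interval)
    raise TimeoutError("operation %s did not complete" % lcmocc_id)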
tacker/tests/functional/sol_v2/utils.py (Normal file)
@@ -0,0 +1,72 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json
import os
import shutil
import subprocess


# vnfdId of samples must be this.
SAMPLE_VNFD_ID = "b1bb0ce7-ebca-4fa7-95ed-4840d7000000"


def make_zip(sample_dir, tmp_dir, vnfd_id, image_path=None):
    # NOTE: '.zip' will be added by shutil.make_archive
    zip_file_name = os.path.basename(os.path.abspath(sample_dir))
    zip_file_path = os.path.join(tmp_dir, zip_file_name)

    tmp_contents = os.path.join(tmp_dir, "contents")
    shutil.copytree(os.path.join(sample_dir, "contents"), tmp_contents)

    # replace vnfd_id
    def_path = os.path.join(tmp_contents, "Definitions")
    for entry in os.listdir(def_path):
        entry_path = os.path.join(def_path, entry)
        with open(entry_path, 'r') as f:
            content = f.read()
        content = content.replace(SAMPLE_VNFD_ID, vnfd_id)
        with open(entry_path, 'w') as f:
            f.write(content)

    if image_path is not None:
        # mkdir Files/ and copy image_path into it
        file_path = os.path.join(tmp_contents, "Files", "images")
        os.makedirs(file_path)
        shutil.copy(image_path, file_path)

    shutil.make_archive(zip_file_path, "zip", tmp_contents)


def get_network_ids(networks):
    # assume OS_* environment variables are already set
    net_ids = {}
    for net in networks:
        p = subprocess.run(
            ["openstack", "net", "show", net, "-c", "id", "-f", "json"],
            capture_output=True, encoding='utf-8')
        net_ids[net] = json.loads(p.stdout)['id']
    return net_ids


def get_subnet_ids(subnets):
    # assume OS_* environment variables are already set
    subnet_ids = {}
    for subnet in subnets:
        p = subprocess.run(
            ["openstack", "subnet", "show", subnet, "-c", "id", "-f", "json"],
            capture_output=True, encoding='utf-8')
        subnet_ids[subnet] = json.loads(p.stdout)['id']
    return subnet_ids
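A short usage sketch of the helpers above. The vnfd_id is generated on the fly, the relative sample path and the network names assume a devstack environment with OS_* credentials exported, as the helpers themselves do:

# Illustrative usage of make_zip() and get_network_ids(); paths are assumptions.
import tempfile

from oslo_utils import uuidutils

from tacker.tests.functional.sol_v2 import utils

tmp_dir = tempfile.mkdtemp()
vnfd_id = uuidutils.generate_uuid()
# Builds <tmp_dir>/sample1.zip with the sample vnfdId replaced by vnfd_id.
utils.make_zip("samples/sample1", tmp_dir, vnfd_id)
net_ids = utils.get_network_ids(['net0', 'net_mgmt'])  # {'net0': '<uuid>', ...}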
tacker/tests/unit/sol_refactored/__init__.py (Normal file, empty)
tacker/tests/unit/sol_refactored/api/__init__.py (Normal file, empty)
tacker/tests/unit/sol_refactored/api/test_api_version.py (Normal file)
@@ -0,0 +1,62 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from unittest import mock

from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.tests import base


class TestAPIVersion(base.BaseTestCase):

    def test_init_null(self):
        vers = api_version.APIVersion()
        self.assertTrue(vers.is_null())

    @mock.patch.object(api_version, 'supported_versions',
                       new=["3.1.4159", "2.0.0"])
    def test_init(self):
        for vers, vers_str in [("2.0.0", "2.0.0"),
                               ("3.1.4159", "3.1.4159"),
                               ("2.0.0-impl:foobar", "2.0.0")]:
            v = api_version.APIVersion(vers)
            self.assertEqual(str(v), vers_str)

    def test_init_exceptions(self):
        self.assertRaises(sol_ex.InvalidAPIVersionString,
                          api_version.APIVersion, "0.1.2")

        self.assertRaises(sol_ex.APIVersionNotSupported,
                          api_version.APIVersion, "9.9.9")

    @mock.patch.object(api_version, 'supported_versions',
                       new=["1.3.0", "1.3.1", "2.0.0"])
    def test_compare(self):
        self.assertTrue(api_version.APIVersion("1.3.0") <
                        api_version.APIVersion("1.3.1"))

        self.assertTrue(api_version.APIVersion("2.0.0") >
                        api_version.APIVersion("1.3.1"))

    @mock.patch.object(api_version, 'supported_versions',
                       new=["1.3.0", "1.3.1", "2.0.0"])
    def test_matches(self):
        vers = api_version.APIVersion("2.0.0")
        self.assertTrue(vers.matches(api_version.APIVersion("1.3.0"),
                                     api_version.APIVersion()))

        self.assertFalse(vers.matches(api_version.APIVersion(),
                                      api_version.APIVersion("1.3.1")))
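For orientation, a hedged sketch of the semantics these tests pin down, not the real api_version module: versions compare as integer tuples, an empty version is the "null" version, an "-impl:..." suffix is ignored when parsing, and matches() treats a null bound as unbounded. The class name below is an assumption used only for illustration:

class APIVersionSketch:
    def __init__(self, version_string=None):
        self.ver = ()
        if version_string:
            version_string = version_string.split('-')[0]
            self.ver = tuple(int(x) for x in version_string.split('.'))

    def is_null(self):
        return self.ver == ()

    def __lt__(self, other):
        return self.ver < other.ver

    def __str__(self):
        return '.'.join(str(x) for x in self.ver)

    def matches(self, min_ver, max_ver):
        # null min/max means "no lower/upper bound"
        if not min_ver.is_null() and self.ver < min_ver.ver:
            return False
        if not max_ver.is_null() and self.ver > max_ver.ver:
            return False
        return True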
tacker/tests/unit/sol_refactored/api/test_validator.py (Normal file)
@@ -0,0 +1,80 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from unittest import mock

from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.api import validator
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.tests import base


test_schema_v200 = {
    'type': 'object',
    'properties': {
        'vnfdId': {'type': 'string'},
        'ProductId': {'type': 'string'}
    },
    'required': ['vnfdId', 'ProductId'],
    'additionalProperties': True
}

test_schema_v210 = {
    'type': 'object',
    'properties': {
        'vnfdId': {'type': 'string'},
        'flavourId': {'type': 'string'}
    },
    'required': ['vnfdId', 'flavourId'],
    'additionalProperties': True
}


class TestValidator(base.BaseTestCase):

    def setUp(self):
        super(TestValidator, self).setUp()
        self.context = context.get_admin_context_without_session()
        self.request = mock.Mock()
        self.request.context = self.context

    @validator.schema(test_schema_v200, '2.0.0', '2.0.2')
    @validator.schema(test_schema_v210, '2.1.0', '2.2.0')
    def _test_method(self, request, body):
        return True

    @mock.patch.object(api_version, 'supported_versions',
                       new=['2.0.0', '2.0.1', '2.0.2', '2.1.0', '2.2.0'])
    def test_validator(self):
        body = {"vnfdId": "vnfd_id", "ProductId": "product_id"}
        for ok_ver in ['2.0.0', '2.0.1', '2.0.2']:
            self.context.api_version = api_version.APIVersion(ok_ver)
            result = self._test_method(request=self.request, body=body)
            self.assertTrue(result)
        for ng_ver in ['2.1.0', '2.2.0']:
            self.context.api_version = api_version.APIVersion(ng_ver)
            self.assertRaises(sol_ex.SolValidationError,
                              self._test_method, request=self.request, body=body)

        body = {"vnfdId": "vnfd_id", "flavourId": "flavour_id"}
        for ok_ver in ['2.1.0', '2.2.0']:
            self.context.api_version = api_version.APIVersion(ok_ver)
            result = self._test_method(request=self.request, body=body)
            self.assertTrue(result)
        for ng_ver in ['2.0.0', '2.0.1', '2.0.2']:
            self.context.api_version = api_version.APIVersion(ng_ver)
            self.assertRaises(sol_ex.SolValidationError,
                              self._test_method, request=self.request, body=body)
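The test exercises stacked, version-ranged schema decorators. A minimal self-contained sketch of the pattern, under the assumption that the context's api_version renders as a dotted string; this is not the real tacker validator, which raises SolValidationError rather than jsonschema's exception:

import functools

import jsonschema


def _ver(s):
    # parse "X.Y.Z" into a comparable tuple
    return tuple(int(x) for x in s.split('.'))


def schema_sketch(body_schema, min_ver, max_ver):
    # Validate the body only when the request API version is inside the range.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, request, body, **kwargs):
            ver = _ver(str(request.context.api_version))
            if _ver(min_ver) <= ver <= _ver(max_ver):
                jsonschema.validate(body, body_schema)  # raises ValidationError
            return func(self, request, body, **kwargs)
        return wrapper
    return decorator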
tacker/tests/unit/sol_refactored/api/test_wsgi.py (Normal file)
@@ -0,0 +1,48 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from unittest import mock

from tacker.sol_refactored.api import wsgi as sol_wsgi
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.tests.unit import base


class TestWsgi(base.TestCase):

    def test_response_too_big(self):
        self.config_fixture.config(group='v2_vnfm', max_content_length=20)
        body = {"key": "value0123456789"}
        response = sol_wsgi.SolResponse(200, body)
        self.assertRaises(sol_ex.ResponseTooBig,
                          response.serialize, mock.Mock(), 'application/json')

    def test_unknown_error_response(self):
        err_msg = "Test error"
        status = 500
        response = sol_wsgi.SolErrorResponse(Exception(err_msg), mock.Mock())
        problem_details = {
            "status": status,
            "detail": err_msg
        }
        self.assertEqual(status, response.status)
        self.assertEqual(problem_details, response.body)

    def test_check_api_version_no_version(self):
        resource = sol_wsgi.SolResource(mock.Mock())
        request = mock.Mock()
        request.headers = {}
        self.assertRaises(sol_ex.APIVersionMissing,
                          resource.check_api_version, request)
tacker/tests/unit/sol_refactored/common/__init__.py (Normal file, empty)
tacker/tests/unit/sol_refactored/common/test_coordinate.py (Normal file)
@@ -0,0 +1,80 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import threading
import time

from oslo_utils import uuidutils

from tacker.sol_refactored.common import coordinate
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.tests import base


class TestCoordinate(base.BaseTestCase):

    def setUp(self):
        super(TestCoordinate, self).setUp()
        self.sem_1 = threading.Semaphore(value=0)
        self.sem_2 = threading.Semaphore(value=0)
        self.ok = False

    @coordinate.lock_vnf_instance('{inst_id}')
    def _work_thread_1(self, inst_id, sleep_time):
        # notify to parent
        self.sem_1.release()

        # wait to notify from parent
        self.sem_2.acquire()

        if sleep_time:
            time.sleep(sleep_time)

    @coordinate.lock_vnf_instance('{inst_id}')
    def _work_thread_2(self, inst_id):
        pass

    @coordinate.lock_vnf_instance('{inst_id}', delay=True)
    def _work_thread_3(self, inst_id):
        self.ok = True

    def test_lock_vnf_instance(self):
        inst_id = uuidutils.generate_uuid()
        th = threading.Thread(target=self._work_thread_1, args=(inst_id, 0))
        th.start()

        # wait to run _work_thread_1
        self.sem_1.acquire()

        self.assertRaises(sol_ex.OtherOperationInProgress,
                          self._work_thread_2, inst_id)

        self.sem_2.release()
        th.join()

    def test_lock_vnf_instance_delay(self):
        inst_id = uuidutils.generate_uuid()
        th = threading.Thread(target=self._work_thread_1, args=(inst_id, 3))
        th.start()

        # wait to run _work_thread_1
        self.sem_1.acquire()

        self.sem_2.release()
        self._work_thread_3(inst_id=inst_id)

        th.join()

        self.assertTrue(self.ok)
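A hedged, in-process sketch of the locking behaviour these tests exercise; the real coordinate module is not shown in this hunk and may use a distributed lock backend. The sketch is deliberately simplified: the wrapped function must receive the instance id as the inst_id keyword argument, whereas the real decorator resolves a '{inst_id}' key expression against the call arguments:

import functools
import threading

_locks = {}
_locks_guard = threading.Lock()


class OtherOperationInProgressSketch(Exception):
    pass


def lock_vnf_instance_sketch(delay=False):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, inst_id, **kwargs):
            with _locks_guard:
                lock = _locks.setdefault(inst_id, threading.Lock())
            # without delay, a busy lock is reported immediately;
            # with delay=True the caller blocks until the lock is free.
            if not lock.acquire(blocking=delay):
                raise OtherOperationInProgressSketch(inst_id)
            try:
                return func(*args, inst_id=inst_id, **kwargs)
            finally:
                lock.release()
        return wrapper
    return decorator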
@@ -0,0 +1,67 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.tests import base


class TestVnfInstanceUtils(base.BaseTestCase):

    def test_json_merge_patch(self):
        # patch is not dict.
        target = {"key1", "value1"}
        patch = "text"
        result = inst_utils.json_merge_patch(target, patch)
        self.assertEqual(patch, result)

        # target is not dict.
        target = "text"
        patch = {"key1", "value1"}
        result = inst_utils.json_merge_patch(target, patch)
        self.assertEqual(patch, result)

        # normal case
        target = {
            "key1": "value1",  # remain
            "key2": "value2",  # modify
            "key3": "value3",  # delete
            "key4": {  # nested
                "key4_1": "value4_1",  # remain
                "key4_2": "value4_2",  # modify
                "key4_3": {"key4_3_1", "value4_3_1"}  # delete
            }
        }
        patch = {
            "key2": "value2_x",  # modify
            "key3": None,  # delete
            "key4": {
                "key4_2": "value4_2_x",  # modify
                "key4_3": None,  # delete
                "key4_4": "value4_4"  # add
            },
            "key5": "value5"  # add
        }
        expected_result = {
            "key1": "value1",
            "key2": "value2_x",
            "key4": {
                "key4_1": "value4_1",
                "key4_2": "value4_2_x",
                "key4_4": "value4_4"
            },
            "key5": "value5"
        }
        result = inst_utils.json_merge_patch(target, patch)
        self.assertEqual(expected_result, result)
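For reference, a minimal RFC 7396 style merge-patch sketch that is consistent with the behaviour asserted above (the real vnf_instance_utils.json_merge_patch may differ in details): a non-dict patch replaces the target, a None value deletes a key, and nested dicts are merged recursively:

def json_merge_patch_sketch(target, patch):
    if not isinstance(patch, dict):
        return patch
    if not isinstance(target, dict):
        target = {}
    for key, value in patch.items():
        if value is None:
            target.pop(key, None)  # None means "delete this key"
        else:
            target[key] = json_merge_patch_sketch(target.get(key), value)
    return target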
tacker/tests/unit/sol_refactored/common/test_vnfd_utils.py (Normal file)
@@ -0,0 +1,151 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from tacker.sol_refactored.common import vnfd_utils
from tacker.tests import base


SAMPLE_VNFD_ID = "b1bb0ce7-ebca-4fa7-95ed-4840d7000000"
SAMPLE_FLAVOUR_ID = "simple"


class TestVnfd(base.BaseTestCase):

    def setUp(self):
        super(TestVnfd, self).setUp()
        cur_dir = os.path.dirname(__file__)
        sample_dir = os.path.join(cur_dir, "..", "samples")

        self.vnfd_1 = vnfd_utils.Vnfd(SAMPLE_VNFD_ID)
        self.vnfd_1.init_from_csar_dir(os.path.join(sample_dir, "sample1"))

    def test_get_sw_image(self):
        expected_result = {
            'VDU2': 'VDU2-image',
            'VirtualStorage': 'image-1.0.0-x86_64-disk'
        }
        result = self.vnfd_1.get_sw_image(SAMPLE_FLAVOUR_ID)

        self.assertEqual(expected_result, result)

    def test_get_sw_image_data(self):
        result = self.vnfd_1.get_sw_image_data(SAMPLE_FLAVOUR_ID)
        # check 'file' existence if 'artifact' is defined
        self.assertEqual('../Files/images/image-1.0.0-x86_64-disk.img',
                         result['VDU2']['file'])
        self.assertNotIn('file', result['VirtualStorage'])

    def test_get_nodes(self):
        expected_vdus = ['VDU1', 'VDU2']
        expected_storages = ['VirtualStorage']
        expected_vls = ['internalVL1', 'internalVL2', 'internalVL3']
        expected_cps = ['VDU1_CP1', 'VDU1_CP2', 'VDU1_CP3', 'VDU1_CP4',
                        'VDU1_CP5', 'VDU2_CP1', 'VDU2_CP2', 'VDU2_CP3',
                        'VDU2_CP4', 'VDU2_CP5']
        # check keys and sampling data
        result_vdus = self.vnfd_1.get_vdu_nodes(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_vdus, list(result_vdus.keys()))
        self.assertEqual('VDU1 compute node',
                         result_vdus['VDU1']['properties']['description'])

        result_storages = self.vnfd_1.get_storage_nodes(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_storages, list(result_storages.keys()))
        self.assertEqual('1.0.0',
                         result_storages['VirtualStorage']['properties']['sw_image_data']
                         ['version'])

        result_vls = self.vnfd_1.get_virtual_link_nodes(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_vls, list(result_vls.keys()))
        self.assertEqual(['ipv4'],
                         result_vls['internalVL3']['properties']['connectivity_type']
                         ['layer_protocols'])

        result_cps = self.vnfd_1.get_vducp_nodes(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_cps, list(result_cps.keys()))
        self.assertEqual(0,
                         result_cps['VDU2_CP1']['properties']['order'])

    def test_get_vdu_cps(self):
        expected_result = ['VDU1_CP1', 'VDU1_CP2', 'VDU1_CP3',
                           'VDU1_CP4', 'VDU1_CP5']
        result = self.vnfd_1.get_vdu_cps(SAMPLE_FLAVOUR_ID, 'VDU1')

        self.assertEqual(expected_result, result)

    def test_get_base_hot(self):
        result = self.vnfd_1.get_base_hot(SAMPLE_FLAVOUR_ID)
        # check keys and sampling data
        self.assertEqual(['VDU1.yaml'], list(result['files'].keys()))
        self.assertEqual(1,
                         result['template']['resources']['VDU1_scale_out']['properties']
                         ['scaling_adjustment'])
        self.assertEqual({'get_param': 'net3'},
                         result['files']['VDU1.yaml']['resources']['VDU1_CP3']
                         ['properties']['network'])

    def test_get_vl_name_from_cp(self):
        vdu_cps = self.vnfd_1.get_vducp_nodes(SAMPLE_FLAVOUR_ID)
        result = self.vnfd_1.get_vl_name_from_cp(SAMPLE_FLAVOUR_ID,
                                                 vdu_cps['VDU1_CP1'])
        # externalVL
        self.assertEqual(None, result)

        result = self.vnfd_1.get_vl_name_from_cp(SAMPLE_FLAVOUR_ID,
                                                 vdu_cps['VDU1_CP3'])
        self.assertEqual('internalVL1', result)

    def test_get_compute_flavor(self):
        result = self.vnfd_1.get_compute_flavor(SAMPLE_FLAVOUR_ID, 'VDU1')
        self.assertEqual('m1.tiny', result)

    def test_get_default_instantiation_level(self):
        result = self.vnfd_1.get_default_instantiation_level(SAMPLE_FLAVOUR_ID)
        self.assertEqual('instantiation_level_1', result)

    def test_get_vdu_num(self):
        result = self.vnfd_1.get_vdu_num(SAMPLE_FLAVOUR_ID, 'VDU1',
                                         'instantiation_level_2')
        self.assertEqual(3, result)

    def test_get_placement_groups(self):
        expected_result = {'affinityOrAntiAffinityGroup1': ['VDU1', 'VDU2']}
        result = self.vnfd_1.get_placement_groups(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_result, result)

    def test_get_targets(self):
        result = self.vnfd_1.get_affinity_targets(SAMPLE_FLAVOUR_ID)
        self.assertEqual([], result)

        expected_result = [(['VDU1', 'VDU2'], 'nfvi_node')]
        result = self.vnfd_1.get_anti_affinity_targets(SAMPLE_FLAVOUR_ID)
        self.assertEqual(expected_result, result)

    def test_get_interface_script(self):
        # script specified
        result = self.vnfd_1.get_interface_script(SAMPLE_FLAVOUR_ID,
                                                  "instantiate_start")
        self.assertEqual("../Scripts/sample_script.py", result)

        # [] specified
        result = self.vnfd_1.get_interface_script(SAMPLE_FLAVOUR_ID,
                                                  "scale_start")
        self.assertEqual(None, result)

        # not specified
        result = self.vnfd_1.get_interface_script(SAMPLE_FLAVOUR_ID,
                                                  "scale_end")
        self.assertEqual(None, result)
@@ -0,0 +1,94 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from unittest import mock

from oslo_utils import uuidutils

from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.controller import vnflcm_v2
from tacker.sol_refactored.nfvo import nfvo_client
from tacker.sol_refactored import objects
from tacker.tests.unit.db import base as db_base


class TestVnflcmV2(db_base.SqlTestCase):

    def setUp(self):
        super(TestVnflcmV2, self).setUp()
        objects.register_all(False)
        self.controller = vnflcm_v2.VnfLcmControllerV2()
        self.request = mock.Mock()
        self.request.context = context.get_admin_context()
        self.request.context.api_version = api_version.APIVersion("2.0.0")

    @mock.patch.object(nfvo_client.NfvoClient, 'get_vnf_package_info_vnfd')
    def test_create_pkg_disabled(self, mocked_get_vnf_package_info_vnfd):
        vnfd_id = uuidutils.generate_uuid()
        pkg_info = objects.VnfPkgInfoV2(
            id=uuidutils.generate_uuid(),
            vnfdId=vnfd_id,
            vnfProvider="provider",
            vnfProductName="product",
            vnfSoftwareVersion="software version",
            vnfdVersion="vnfd version",
            operationalState="DISABLED"
        )
        mocked_get_vnf_package_info_vnfd.return_value = pkg_info
        body = {
            "vnfdId": vnfd_id,
            "vnfInstanceName": "test",
            "vnfInstanceDescription": "test"
        }
        self.assertRaises(sol_ex.VnfdIdNotEnabled,
                          self.controller.create, request=self.request, body=body)

    def test_invalid_subscription(self):
        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["BASIC"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
                               self.controller.subscription_create,
                               request=self.request, body=body)
        self.assertEqual("ParmasBasic must be specified.", ex.detail)

        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
                               self.controller.subscription_create,
                               request=self.request, body=body)
        self.assertEqual("paramsOauth2ClientCredentials must be specified.",
                         ex.detail)

        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["TLS_CERT"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
                               self.controller.subscription_create,
                               request=self.request, body=body)
        self.assertEqual("'TLS_CERT' is not supported at the moment.",
                         ex.detail)
tacker/tests/unit/sol_refactored/controller/test_vnflcm_view.py (Normal file)
@@ -0,0 +1,153 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from unittest import mock

from dateutil import parser

from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.controller import vnflcm_view
from tacker.tests import base


class FakeField(object):

    def __init__(self, nullable):
        self.nullable = nullable


class TestFilterExpr(base.BaseTestCase):

    def test_match_datetime(self):
        fexp = vnflcm_view.FilterExpr('lte',
            ['fooTime'], ['2021-08-24T02:26:46.796695+00:00'])
        val = {'fooTime': parser.isoparse("2021-08-11T08:09:10+00:00")}
        self.assertTrue(fexp.match(val))

        val2 = {'fooTime': parser.isoparse("2021-09-01T08:09:10+00:00")}
        self.assertFalse(fexp.match(val2))

    def test_match_int(self):
        fexp = vnflcm_view.FilterExpr('in',
            ['fooNum'], ['11', '13', '17', '19', '23'])
        self.assertTrue(fexp.match(dict(fooNum=11)))
        self.assertFalse(fexp.match(dict(fooNum=15)))

    def test_match_bool(self):
        fexp = vnflcm_view.FilterExpr('eq',
            ['foo/bar'], ['false'])
        self.assertFalse(fexp.match({'foo': {'bar': True}}))


class TestAttributeSelector(base.BaseTestCase):

    def test_invalid_params(self):
        self.assertRaises(sol_ex.InvalidAttributeSelector,
                          vnflcm_view.AttributeSelector,
                          [], all_fields='1', exclude_default='1')

        self.assertRaises(sol_ex.InvalidAttributeSelector,
                          vnflcm_view.AttributeSelector,
                          [], fields='a', exclude_fields='b')

        self.assertRaises(sol_ex.InvalidAttributeSelector,
                          vnflcm_view.AttributeSelector,
                          [], exclude_default='1', exclude_fields='b')

    def test_filter_default(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'])
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(True)
        r = selector.filter(obj, {'foo': 1, 'bar': 2, 'baz': 3})
        self.assertEqual(r, {'baz': 3})

    def test_filter_exclude_default(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'],
                                                 exclude_default='1')
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(True)
        r = selector.filter(obj, {'foo': 1, 'bar': 2, 'baz': 3})
        self.assertEqual(r, {'baz': 3})

    def test_filter_default_not_omittable(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'])
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(False)
        r = selector.filter(obj, {'foo': 1, 'bar': 2})
        self.assertEqual(r, {'foo': 1, 'bar': 2})

    def test_filter_all_fields(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'])
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(True)
        odict = {'foo': 1, 'bar': 2, 'baz': 3}
        r = selector.filter(obj, odict)
        self.assertEqual(r, odict)

    def test_filter_exclude_fields(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'],
                                                 exclude_fields='bar,baz')
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(True)
        r = selector.filter(obj, {'foo': 1, 'bar': 2, 'baz': 3})
        self.assertEqual(r, {'foo': 1})

    def test_filter_fields(self):
        selector = vnflcm_view.AttributeSelector(['foo', 'bar'],
                                                 exclude_default='1',
                                                 fields='bar')
        obj = mock.NonCallableMagicMock()
        obj.fields.__getitem__.return_value = FakeField(True)
        r = selector.filter(obj, {'foo': 1, 'bar': 2, 'baz': 3})
        self.assertEqual(r, {'bar': 2, 'baz': 3})


class TestBaseViewBuilder(base.BaseTestCase):

    def test_parse_filter(self):
        builder = vnflcm_view.BaseViewBuilder()
        f1 = builder.parse_filter("(eq,foo/bar,abc)")
        self.assertEqual(len(f1), 1)
        self.assertEqual(f1[0].attr, ['foo', 'bar'])

        f2 = builder.parse_filter("(eq,foo/bar,')1');(neq,baz,'''a')")
        self.assertEqual(len(f2), 2)
        self.assertEqual(f2[0].attr, ['foo', 'bar'])
        self.assertEqual(f2[0].values, [')1'])
        self.assertEqual(f2[1].attr, ['baz'])
        self.assertEqual(f2[1].values, ["'a"])

        f3 = builder.parse_filter("(eq,~01/c~1~a/~be,10)")
        self.assertEqual(len(f3), 1)
        self.assertEqual(f3[0].attr, ['~1', 'c/,', '@e'])

        f4 = builder.parse_filter("(in,foo,'a','bc');(cont,bar,'def','ghi')")
        self.assertEqual(len(f4), 2)
        self.assertEqual(len(f4[0].values), 2)
        self.assertEqual(len(f4[1].values), 2)

    def test_parse_filter_invalid(self):
        builder = vnflcm_view.BaseViewBuilder()
        self.assertRaises(sol_ex.InvalidAttributeFilter,
                          builder.parse_filter,
                          "(le,foo/bar,abc)")

        self.assertRaises(sol_ex.InvalidAttributeFilter,
                          builder.parse_filter,
                          "(gt,foo/bar)")

        self.assertRaises(sol_ex.InvalidAttributeFilter,
                          builder.parse_filter,
                          "(gt,foo,1,2)")
@@ -0,0 +1,218 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

from tacker.sol_refactored.infra_drivers.openstack import userdata_utils
from tacker.tests import base


SAMPLE_VNFD_ID = "b1bb0ce7-ebca-4fa7-95ed-4840d7000000"
SAMPLE_FLAVOUR_ID = "simple"


class TestVnfd(base.BaseTestCase):

    def setUp(self):
        super(TestVnfd, self).setUp()
        cur_dir = os.path.dirname(__file__)
        sample_dir = os.path.join(cur_dir, "../..", "samples")

        self.vnfd_1 = userdata_utils.get_vnfd(
            SAMPLE_VNFD_ID, os.path.join(sample_dir, "sample1"))

    def test_init_nfv_dict(self):
        hot_dict = self.vnfd_1.get_base_hot(SAMPLE_FLAVOUR_ID)
        top_hot = hot_dict['template']

        expected_result = {
            'VDU': {
                'VDU1': {'computeFlavourId': None},
                'VirtualStorage': {'vcImageId': None},
                'VDU2': {'computeFlavourId': None, 'vcImageId': None}
            },
            'CP': {
                'VDU1_CP1': {'network': None},
                'VDU1_CP2': {'network': None,
                             'fixed_ips': {0: {'subnet': None}}},
                'VDU2_CP1': {'network': None,
                             'fixed_ips': {0: {'ip_address': None}}},
                'VDU2_CP2': {'network': None,
                             'fixed_ips': {0: {'ip_address': None,
                                               'subnet': None}}}
            }
        }
        result = userdata_utils.init_nfv_dict(top_hot)
        self.assertEqual(expected_result, result)

    def test_get_param_flavor(self):
        req = {'flavourId': SAMPLE_FLAVOUR_ID}
        flavor = 'm1.large'
        grant = {
            'vimAssets': {
                'computeResourceFlavours': [
                    {'vnfdVirtualComputeDescId': 'VDU1',
                     'vimFlavourId': flavor}
                ]
            }
        }

        result = userdata_utils.get_param_flavor('VDU1', req,
                                                 self.vnfd_1, grant)
        self.assertEqual(flavor, result)

        # if not exist in grant, get from VNFD
        result = userdata_utils.get_param_flavor('VDU2', req,
                                                 self.vnfd_1, grant)
        self.assertEqual('m1.tiny', result)

    def test_get_param_image(self):
        req = {'flavourId': SAMPLE_FLAVOUR_ID}
        image_id = 'f30e149d-b3c7-497a-8b19-a092bc81e47b'
        grant = {
            'vimAssets': {
                'softwareImages': [
                    {'vnfdSoftwareImageId': 'VDU2',
                     'vimSoftwareImageId': image_id},
                    {'vnfdSoftwareImageId': 'VirtualStorage',
                     'vimSoftwareImageId': 'image-1.0.0-x86_64-disk'}
                ]
            }
        }

        result = userdata_utils.get_param_image('VDU2', req,
                                                self.vnfd_1, grant)
        self.assertEqual(image_id, result)

    def test_get_param_zone(self):
        grant_req = {
            'addResources': [
                {'id': 'dd60c89a-29a2-43bc-8cff-a534515523df',
                 'type': 'COMPUTE', 'resourceTemplateId': 'VDU1'}
            ]
        }
        grant = {
            'zones': [
                {'id': '717f6ae9-3094-46b6-b070-89ede8337571',
                 'zoneId': 'nova'}
            ],
            'addResources': [
                {'resourceDefinitionId':
                    'dd60c89a-29a2-43bc-8cff-a534515523df',
                 'zoneId': '717f6ae9-3094-46b6-b070-89ede8337571'}
            ]
        }

        result = userdata_utils.get_param_zone('VDU1', grant_req, grant)
        self.assertEqual('nova', result)

    def test_get_param_network(self):
        res_id = "8fe7cc1a-e4ac-41b9-8b89-ed14689adb9c"
        req = {
            "extVirtualLinks": [
                {
                    "id": "acf5c23a-02d3-42e6-801b-fba0314bb6aa",
                    "resourceId": res_id,
                    "extCps": [
                        {
                            "cpdId": "VDU1_CP1",
                            "cpConfig": {}  # omit
                        }
                    ]
                }
            ]
        }

        result = userdata_utils.get_param_network('VDU1_CP1', {}, req)
        self.assertEqual(res_id, result)

    def test_get_param_fixed_ips(self):
        ip_address = "10.10.1.101"
        subnet_id = "9defebca-3e9c-4bd2-9fa0-c4210c56ece6"
        ext_cp = {
            "cpdId": "VDU2_CP2",
            "cpConfig": {
                "VDU2_CP2_1": {
                    "cpProtocolData": [
                        {
                            "layerProtocol": "IP_OVER_ETHERNET",
                            "ipOverEthernet": {
                                "ipAddresses": [
                                    {
                                        "type": "IPV4",
                                        "fixedAddresses": [
                                            ip_address
                                        ],
                                        "subnetId": subnet_id
                                    }
                                ]
                            }
                        }
                    ]
                }
            }
        }
        req = {
            "extVirtualLinks": [
                {
                    "id": "8b49f4b6-1ff9-4a03-99cf-ff445b788436",
                    "resourceId": "4c54f742-5f1d-4287-bb81-37bf2e6ddc3e",
                    "extCps": [ext_cp]
                }
            ]
        }
        expected_result = [{'ip_address': ip_address, 'subnet': subnet_id}]

        result = userdata_utils.get_param_fixed_ips('VDU2_CP2', {}, req)
        self.assertEqual(expected_result, result)

    def test_apply_ext_managed_vls(self):
        hot_dict = self.vnfd_1.get_base_hot(SAMPLE_FLAVOUR_ID)
        top_hot = hot_dict['template']

        res_id = "c738c2bb-1d24-4883-a2d8-a5c7c4ee8879"
        vl = "internalVL1"
        vl_subnet = "internalVL1_subnet"
        req = {
            "extManagedVirtualLinks": [
                {
                    "id": "1c7825cf-b883-4281-b8fc-ee006df8b2ba",
                    "vnfVirtualLinkDescId": vl,
                    "resourceId": res_id
                }
            ]
        }

        # make sure before apply
        self.assertEqual({'get_resource': vl},
                         top_hot['resources']['VDU1_scale_group']['properties']
                         ['resource']['properties']['net3'])
        self.assertEqual({'get_resource': vl},
                         top_hot['resources']['VDU2_CP3']['properties']['network'])
        self.assertIn(vl, top_hot['resources'])
        self.assertIn(vl_subnet, top_hot['resources'])

        userdata_utils.apply_ext_managed_vls(top_hot, req, {})

        # check after
        # replaced with the resource id
        self.assertEqual(res_id,
                         top_hot['resources']['VDU1_scale_group']['properties']
                         ['resource']['properties']['net3'])
        self.assertEqual(res_id,
                         top_hot['resources']['VDU2_CP3']['properties']['network'])
        # removed
        self.assertNotIn(vl, top_hot['resources'])
        self.assertNotIn(vl_subnet, top_hot['resources'])
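Judging from expected_result in test_init_nfv_dict, init_nfv_dict walks the base HOT template and turns every {get_param: [nfv, ...]} reference into a nested dict whose leaves are None, to be filled in later from the request and grant. A hedged sketch of that idea (not the real userdata_utils implementation):

def init_nfv_dict_sketch(hot_template):
    nfv = {}

    def walk(node):
        if isinstance(node, dict):
            param = node.get('get_param')
            if isinstance(param, list) and param and param[0] == 'nfv':
                # build the nested path, e.g. ['nfv', 'VDU', 'VDU1',
                # 'computeFlavourId'] -> nfv['VDU']['VDU1']['computeFlavourId']
                cur = nfv
                for key in param[1:-1]:
                    cur = cur.setdefault(key, {})
                cur[param[-1]] = None
            else:
                for value in node.values():
                    walk(value)
        elif isinstance(node, list):
            for item in node:
                walk(item)

    walk(hot_template)
    return nfv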
tacker/tests/unit/sol_refactored/objects/test_base.py (Normal file)
@@ -0,0 +1,134 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import datetime
from unittest import mock

from oslo_serialization import jsonutils

from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
from tacker.tests import base as tests_base


@base.TackerObjectRegistry.register
class MyObj(base.TackerObject):

    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'data': fields.StringField(nullable=False),
        'listData': fields.ListOfObjectsField(
            'MySubObj', nullable=True),
        'createdAt': fields.DateTimeField(nullable=False),
    }


@base.TackerObjectRegistry.register
class MySubObj(base.TackerObject):

    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'data': fields.StringField(nullable=False),
    }


@base.TackerObjectRegistry.register
class MyDBObj(MyObj, base.TackerPersistentObject):

    pass


class TestTackerObject(tests_base.BaseTestCase):

    def test_tacker_obj_get_changes(self):
        o = MyObj.from_dict({'id': 'foo',
                             'data': 'abcde',
                             'createdAt': '2021-09-01T12:34:56+09:00'})
        o.obj_reset_changes()
        self.assertEqual(o.tacker_obj_get_changes(), {})
        o.data = '12345'
        o.createdAt = datetime.datetime(2021, 8, 7, 6, 5, 44,
            tzinfo=datetime.timezone(datetime.timedelta(hours=-9)))
        changes = o.tacker_obj_get_changes()
        self.assertEqual(len(changes), 2)
        self.assertIn('data', changes)
        self.assertIsNone(changes['createdAt'].tzinfo)
        self.assertEqual(changes['createdAt'].hour, o.createdAt.hour + 9)

    def test_from_dict(self):
        o = MyObj.from_dict({'id': 'foo',
                             'data': 'abcde',
                             'listData': [
                                 {'id': 'foo1', 'data': 'bar1'},
                                 {'id': 'foo2', 'data': 'bar2'},
                             ],
                             'createdAt': '2021-09-01T12:34:56+09:00'})
        self.assertEqual(o.id, 'foo')
        self.assertIsInstance(o.createdAt, datetime.datetime)
        self.assertEqual(len(o.listData), 2)
        self.assertEqual(o.listData[1].data, 'bar2')


class TestTackerObjectSerializer(tests_base.BaseTestCase):

    def test_serialize_entity(self):
        serializer = base.TackerObjectSerializer()
        o = MyObj.from_dict({'id': 'foo',
                             'data': 'abcde',
                             'listData': [
                                 {'id': 'foo1', 'data': 'bar1'},
                                 {'id': 'foo2', 'data': 'bar2'},
                             ],
                             'createdAt': '2021-09-01T12:34:56+09:00'})
        entity = serializer.serialize_entity(mock.Mock(), o)
        self.assertEqual(entity['tacker_sol_refactored_object.name'], 'MyObj')
        self.assertEqual(entity['tacker_sol_refactored_object.namespace'],
                         'tacker_sol_refactored')
        data = entity['tacker_sol_refactored_object.data']
        self.assertEqual(set(data.keys()),
                         set(['id', 'data', 'listData', 'createdAt']))

        o2 = serializer.deserialize_entity(mock.Mock(), entity)
        self.assertEqual(o2.listData[1].id, o.listData[1].id)
        self.assertEqual(o2.createdAt, o.createdAt)


class TestTackerPersistentObject(tests_base.BaseTestCase):

    def test_from_db_obj(self):
        o = MyDBObj.from_db_obj(
            {'id': 'foo', 'data': 'abcde',
             'listData': '[{"id": "foo1", "data": "bar1"},'
                         '{"id": "foo2", "data": "bar2"}]',
             'createdAt': datetime.datetime(2021, 9, 1, 12, 34, 56)})
        self.assertEqual(o.id, 'foo')
        self.assertEqual(len(o.listData), 2)
        self.assertEqual(o.listData[0].data, 'bar1')

    def test_to_db_obj(self):
        o = MyDBObj.from_dict({'id': 'foo',
                               'data': 'abcde',
                               'listData': [
                                   {'id': 'foo1', 'data': 'bar1'},
                                   {'id': 'foo2', 'data': 'bar2'},
                               ],
                               'createdAt': '2021-09-01T12:34:56'})
        dbobj = o.to_db_obj()
        self.assertEqual(jsonutils.loads(dbobj['listData']),
                         [{"id": "foo1", "data": "bar1"},
                          {"id": "foo2", "data": "bar2"}])
Some files were not shown because too many files have changed in this diff.