Implement Artifacts support in Tacker

Implements: blueprint add-artifacts-vnf-packages

* Add artifacts support for vnf packages:
  * Read and verify artifacts from the TOSCA.meta and manifest files.
  * Modify and add artifact-related APIs.
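
A minimal client sketch of the new artifact fetch API (illustration only:
the host, port and package id are placeholders, and the URL assumes the
standard /vnfpkgm/v1 prefix of the Tacker API):

import requests

# Placeholder values for illustration; substitute a real Tacker endpoint
# and the id of an ONBOARDED vnf package.
base = "http://<tacker-host>:9890/vnfpkgm/v1"
package_id = "<onboarded-vnf-package-id>"
artifact_path = "Scripts/install.sh"

resp = requests.get(
    f"{base}/vnf_packages/{package_id}/artifacts/{artifact_path}")
print(resp.status_code)                   # 200 on success
print(resp.headers.get("Content-Type"))   # guessed from the file name
print(resp.content[:40])                  # raw artifact bytes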

Change-Id: Iad045407338535022aa385b57125225ee6253732
LiangLu 2020-07-07 05:57:44 -04:00
parent 959da510b5
commit c0647bde69
71 changed files with 8496 additions and 236 deletions

View File

@ -13,9 +13,15 @@
# License for the specific language governing permissions and limitations
# under the License.
from io import BytesIO
import mimetypes
import os
from glance_store import exceptions as store_exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import uuidutils
import six
@ -28,6 +34,7 @@ from tacker._i18n import _
from tacker.api.schemas import vnf_packages
from tacker.api import validation
from tacker.api.views import vnf_packages as vnf_packages_view
from tacker.common import csar_utils
from tacker.common import exceptions
from tacker.common import utils
from tacker.conductor.conductorrpc import vnf_pkgm_rpc
@ -99,8 +106,8 @@ class VnfPkgmController(wsgi.Controller):
try:
vnf_package = vnf_package_obj.VnfPackage.get_by_id(
request.context, id,
expected_attrs=["vnf_deployment_flavours", "vnfd"])
request.context, id, expected_attrs=[
"vnf_deployment_flavours", "vnfd", "vnf_artifacts"])
except exceptions.VnfPackageNotFound:
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
@ -475,6 +482,134 @@ class VnfPkgmController(wsgi.Controller):
request.response.headers['Content-Type'] = 'application/zip'
return self._create_vnfd_zip(vnfd_files_and_data)
@wsgi.response(http_client.OK)
@wsgi.expected_errors((http_client.BAD_REQUEST, http_client.FORBIDDEN,
http_client.NOT_FOUND, http_client.CONFLICT,
http_client.REQUESTED_RANGE_NOT_SATISFIABLE))
def fetch_vnf_package_artifacts(self, request, id, artifact_path):
context = request.environ['tacker.context']
# get policy
context.can(vnf_package_policies.VNFPKGM % 'fetch_artifact')
# get vnf_package
if not uuidutils.is_uuid_like(id):
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
try:
vnf_package = vnf_package_obj.VnfPackage.get_by_id(
request.context, id,
expected_attrs=["vnf_artifacts"])
except exceptions.VnfPackageNotFound:
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
if vnf_package.onboarding_state != \
fields.PackageOnboardingStateType.ONBOARDED:
msg = _("VNF Package %(id)s state is not "
"%(onboarded)s")
raise webob.exc.HTTPConflict(explanation=msg % {"id": id,
"onboarded": fields.PackageOnboardingStateType.ONBOARDED})
offset, chunk_size = 0, None
# get all artifact's path
artifact_file_paths = []
for item in vnf_package.vnf_artifacts:
artifact_file_paths.append(item.artifact_path)
if artifact_path in artifact_file_paths:
# get file's size
csar_path = self._get_csar_path(vnf_package)
absolute_artifact_path = os.path.join(csar_path, artifact_path)
if not os.path.isfile(absolute_artifact_path):
msg = _(
"This type of path(url) '%s' is currently not supported") \
% artifact_path
raise webob.exc.HTTPBadRequest(explanation=msg)
artifact_size = os.path.getsize(absolute_artifact_path)
range_val = self._get_range_from_request(request, artifact_size)
# range_val exists
if range_val:
if isinstance(range_val, webob.byterange.Range):
# get the position of the last byte in the artifact file
response_end = artifact_size - 1
if range_val.start >= 0:
offset = range_val.start
else:
if abs(range_val.start) < artifact_size:
offset = artifact_size + range_val.start
if range_val.end is not None and \
range_val.end < artifact_size:
chunk_size = range_val.end - offset
response_end = range_val.end - 1
else:
chunk_size = artifact_size - offset
request.response.status_int = 206
# range_val does not exist, download the whole content of file
else:
offset = 0
chunk_size = artifact_size
# get the file's MIME type
mime_type = mimetypes.guess_type(artifact_path.split('/')[-1])[0]
if mime_type:
request.response.headers['Content-Type'] = mime_type
else:
request.response.headers['Content-Type'] = \
'application/octet-stream'
try:
artifact_data = self._download_vnf_artifact(
absolute_artifact_path, offset, chunk_size)
except exceptions.FailedToGetVnfArtifact as e:
LOG.error(e.msg)
raise webob.exc.HTTPInternalServerError(
explanation=e.msg)
request.response.text = artifact_data.decode('utf-8')
if request.response.status_int == 206:
request.response.headers['Content-Range'] = 'bytes %s-%s/%s' \
% (offset,
response_end,
artifact_size)
else:
chunk_size = artifact_size
request.response.headers['Content-Length'] = chunk_size
return request.response
else:
msg = _("Not Found Artifact File.")
raise webob.exc.HTTPNotFound(explanation=msg)
def _get_csar_path(self, vnf_package):
csar_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
vnf_package.id)
if not os.path.isdir(csar_path):
location = vnf_package.location_glance_store
try:
zip_path = glance_store.load_csar(vnf_package.id, location)
csar_utils.extract_csar_zip_file(zip_path, csar_path)
except (store_exceptions.GlanceStoreException) as e:
exc_msg = encodeutils.exception_to_unicode(e)
msg = (_("Exception raised from glance store can be "
"unrecoverable if it is not related to connection"
" error. Error: %s.") % exc_msg)
raise exceptions.FailedToGetVnfArtifact(error=msg)
return csar_path
def _download_vnf_artifact(self, artifact_file_path, offset=0,
chunk_size=None):
try:
with open(artifact_file_path, 'rb') as f:
f.seek(offset, 1)
vnf_artifact_data = f.read(chunk_size)
return vnf_artifact_data
except Exception as e:
exc_msg = encodeutils.exception_to_unicode(e)
msg = (_("Exception raised while reading artifact file"
" Error: %s.") % exc_msg)
raise exceptions.FailedToGetVnfArtifact(error=msg)
def _create_vnfd_zip(self, vnfd_files_and_data):
buff = BytesIO()
with ZipFile(buff, 'w', zipfile.ZIP_DEFLATED) as zip_archive:
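
Standalone usage sketch (not part of the diff): the fetch_vnf_package_artifacts
handler above honours HTTP Range headers and returns 206 with a Content-Range
header when the requested byte range is satisfiable. Host, port and package id
below are placeholders.

import requests

url = ("http://<tacker-host>:9890/vnfpkgm/v1/vnf_packages/"
       "<package-id>/artifacts/Scripts/install.sh")

# Request bytes 10-49 (inclusive) of the artifact; 40 bytes come back
# when the file is at least 50 bytes long.
resp = requests.get(url, headers={"Range": "bytes=10-49"})
print(resp.status_code)                    # 206 Partial Content
print(resp.headers.get("Content-Range"))   # e.g. "bytes 10-49/<file size>"
print(len(resp.content))                   # 40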

View File

@ -81,3 +81,10 @@ class VnfpkgmAPIRouter(wsgi.Router):
self._setup_route(mapper,
"/vnf_packages/{id}/vnfd",
methods, controller, default_resource)
# Allowed methods on /vnf_packages/{id}/artifacts/{artifact_path}
methods = {"GET": "fetch_vnf_package_artifacts"}
self._setup_route(mapper,
"/vnf_packages/{id}/artifacts/"
"{artifact_path:.*?/*.*?}",
methods, controller, default_resource)
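
A small standalone sketch (not the actual Tacker wsgi wiring) showing that the
path requirement above lets artifact_path span multiple path segments; the
package id used in the match call is just an example value:

import routes

mapper = routes.Mapper()
mapper.connect("vnf_package_artifact",
               "/vnf_packages/{id}/artifacts/{artifact_path:.*?/*.*?}",
               action="fetch_vnf_package_artifacts")

match = mapper.match("/vnf_packages/b1bb0ce7-ebca-4fa7-95ed-4840d70a1177"
                     "/artifacts/Scripts/install.sh")
print(match["artifact_path"])   # -> "Scripts/install.sh"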

View File

@ -12,22 +12,38 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
import re
import shutil
import yaml
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from six.moves.urllib.parse import urlparse
from toscaparser.prereq.csar import CSAR
from toscaparser.tosca_template import ToscaTemplate
import zipfile
from tacker.common import exceptions
import tacker.conf
import urllib.request as urllib2
HASH_DICT = {
'sha-224': hashlib.sha224,
'sha-256': hashlib.sha256,
'sha-384': hashlib.sha384,
'sha-512': hashlib.sha512
}
CONF = tacker.conf.CONF
LOG = logging.getLogger(__name__)
TOSCA_META = 'TOSCA-Metadata/TOSCA.meta'
ARTIFACT_KEYS = ['Source', 'Algorithm', 'Hash']
IMAGE_FORMAT_LIST = ['raw', 'vhd', 'vhdx', 'vmdk', 'vdi', 'iso', 'ploop',
'qcow2', 'aki', 'ari', 'ami', 'img']
def _check_type(custom_def, node_type, type_list):
@ -272,7 +288,140 @@ def _get_data_from_csar(tosca, context, id):
error_msg = "No VNF flavours are available"
raise exceptions.InvalidCSAR(error_msg)
return vnf_data, flavours
csar = CSAR(tosca.input_path, tosca.a_file)
vnf_artifacts = []
if csar.validate():
vnf_artifacts = _get_vnf_artifacts(csar)
return vnf_data, flavours, vnf_artifacts
def _get_vnf_artifacts(csar):
vnf_artifacts = []
if csar.is_tosca_metadata:
if csar._get_metadata("ETSI-Entry-Manifest"):
manifest_path = csar._get_metadata("ETSI-Entry-Manifest")
if manifest_path.lower().endswith(".mf"):
manifest_data = csar.zfile.read(manifest_path)
vnf_artifacts = _convert_artifacts(
vnf_artifacts, manifest_data, csar)
else:
invalid_manifest_err_msg = (
('The file "%(manifest)s" in the CSAR "%(csar)s" does not '
'contain valid manifest.') %
{'manifest': manifest_path, 'csar': csar.path})
raise exceptions.InvalidCSAR(invalid_manifest_err_msg)
tosca_data = csar.zfile.read(TOSCA_META)
vnf_artifacts = _convert_artifacts(vnf_artifacts, tosca_data, csar)
else:
filelist = csar.zfile.namelist()
main_template_file_name = os.path.splitext(
csar.main_template_file_name)[0]
for path in filelist:
if path.lower().endswith(".mf"):
manifest_file_name = os.path.splitext(path)[0]
if manifest_file_name == main_template_file_name:
manifest_data = csar.zfile.read(path)
vnf_artifacts = _convert_artifacts(
vnf_artifacts, manifest_data, csar)
else:
invalid_manifest_err_msg = \
(('The filename "%(manifest)s" is an invalid name.'
'The name must be the same as the main template '
'file name.') %
{'manifest': path})
raise exceptions.InvalidCSAR(invalid_manifest_err_msg)
# Deduplication
vnf_artifacts = [dict(t) for t in set([tuple(d.items())
for d in vnf_artifacts])]
return vnf_artifacts
def _convert_artifacts(vnf_artifacts, artifacts_data, csar):
artifacts_data_split = re.split(b'\n\n+', artifacts_data)
for data in artifacts_data_split:
if re.findall(b'.?Name:.?|.?Source:.?|', data):
# validate key's existence
if re.findall(b'.?Algorithm:.?|.?Hash:.?', data):
artifact_data_dict = yaml.safe_load(data)
if 'Name' in artifact_data_dict.keys():
artifact_data_dict.update(
{"Source": artifact_data_dict.pop("Name")})
if 'Content-Type' in artifact_data_dict.keys():
del artifact_data_dict['Content-Type']
if sorted(ARTIFACT_KEYS) != sorted(artifact_data_dict.keys()):
missing_key = list(set(ARTIFACT_KEYS) ^
set(artifact_data_dict.keys()))
missing_key = sorted(missing_key)
invalid_artifact_err_msg = \
(('One of the artifact information '
'may not have the key("%(key)s")') %
{'key': missing_key})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
# validate value's existence
for key, value in artifact_data_dict.items():
if not value:
invalid_artifact_err_msg = \
(('One of the artifact information may not have '
'the key value("%(key)s")') % {'key': key})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
artifact_path = artifact_data_dict.get('Source')
if os.path.splitext(artifact_path)[-1][1:] \
in IMAGE_FORMAT_LIST:
continue
else:
algorithm = artifact_data_dict.get('Algorithm')
hash_code = artifact_data_dict.get('Hash')
result = _validate_hash(algorithm, hash_code,
csar, artifact_path)
if result:
vnf_artifacts.append(artifact_data_dict)
else:
invalid_artifact_err_msg = \
(('The hash "%(hash)s" of artifact file '
'"%(artifact)s" is an invalid value.') %
{'hash': hash_code, 'artifact': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
return vnf_artifacts
def _validate_hash(algorithm, hash_code, csar, artifact_path):
z = zipfile.ZipFile(csar.path)
algorithm = algorithm.lower()
# validate Algorithm's value
if algorithm in HASH_DICT.keys():
hash_obj = HASH_DICT[algorithm]()
else:
invalid_artifact_err_msg = (('The algorithm("%(algorithm)s") of '
'artifact("%(artifact_path)s") is '
'an invalid value.') %
{'algorithm': algorithm,
'artifact_path': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
filelist = csar.zfile.namelist()
# validate Source's value
if artifact_path in filelist:
hash_obj.update(z.read(artifact_path))
elif ((urlparse(artifact_path).scheme == 'file') or
(bool(urlparse(artifact_path).scheme) and
bool(urlparse(artifact_path).netloc))):
hash_obj.update(urllib2.urlopen(artifact_path).read())
else:
invalid_artifact_err_msg = (('The path("%(artifact_path)s") of '
'artifact Source is an invalid value.') %
{'artifact_path': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
# validate Hash's value
if hash_code == hash_obj.hexdigest():
return True
else:
return False
def extract_csar_zip_file(file_path, extract_path):
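
The _validate_hash helper above recomputes the digest of each non-image
artifact with the algorithm named in the manifest and compares it with the
declared Hash value. A standalone sketch of the same check outside a CSAR
(file path and digest taken from the sample manifest in the test data):

import hashlib

entry = {"Source": "Scripts/install.sh",
         "Algorithm": "SHA-256",
         "Hash": "27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b"
                 "236ef7b6f50d6674b34f02a"}

# Normalise "SHA-256" -> "sha256" and hash the artifact contents.
hash_obj = hashlib.new(entry["Algorithm"].lower().replace("-", ""))
with open(entry["Source"], "rb") as f:
    hash_obj.update(f.read())

print(hash_obj.hexdigest() == entry["Hash"])   # True when the file is intact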

View File

@ -248,6 +248,10 @@ class VnfSoftwareImageNotFound(NotFound):
message = _("No vnf software image with id %(id)s.")
class VnfArtifactNotFound(NotFound):
message = _("No vnf artifact with id %(id)s.")
class VnfInstantiatedInfoNotFound(NotFound):
message = _("No vnf instantiated info for vnf id %(vnf_instance_id)s.")
@ -303,6 +307,10 @@ class FailedToGetVnfdData(Invalid):
message = _("Failed to get csar zip file from glance store: %(error)s")
class FailedToGetVnfArtifact(Invalid):
message = _("Failed to get artifact file from glance store: %(error)s")
class FailedToGetVnfPackageDetails(Invalid):
message = _("Failed to get vnf package details: %(error)s")

View File

@ -233,7 +233,26 @@ class Conductor(manager.Manager):
self._create_software_images(
context, sw_image, deploy_flavour.id)
def _onboard_vnf_package(self, context, vnf_package, vnf_data, flavours):
def _create_vnf_artifacts(self, context, package_uuid, artifact):
vnf_artifact = objects.VnfPackageArtifactInfo(context=context)
vnf_artifact.package_uuid = package_uuid
vnf_artifact.artifact_path = artifact['Source']
vnf_artifact.algorithm = artifact['Algorithm']
vnf_artifact.hash = artifact['Hash']
vnf_artifact._metadata = {}
vnf_artifact.create()
def _onboard_vnf_package(
self,
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts):
if vnf_artifacts:
for artifact in vnf_artifacts:
self._create_vnf_artifacts(context, vnf_package.id, artifact)
package_vnfd = objects.VnfPackageVnfd(context=context)
package_vnfd.package_uuid = vnf_package.id
@ -251,9 +270,14 @@ class Conductor(manager.Manager):
def upload_vnf_package_content(self, context, vnf_package):
location = vnf_package.location_glance_store
zip_path = glance_store.load_csar(vnf_package.id, location)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
context.elevated(), vnf_package.id, zip_path)
self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)
self._onboard_vnf_package(
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts)
vnf_package.onboarding_state = (
fields.PackageOnboardingStateType.ONBOARDED)
vnf_package.operational_state = (
@ -282,10 +306,15 @@ class Conductor(manager.Manager):
vnf_package.save()
zip_path = glance_store.load_csar(vnf_package.id, location)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
context.elevated(), vnf_package.id, zip_path)
self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)
self._onboard_vnf_package(
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts)
vnf_package.onboarding_state = (
fields.PackageOnboardingStateType.ONBOARDED)
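
For illustration, the vnf_artifacts value now returned by
csar_utils.load_csar_data() is a list of plain dicts, one per manifest block
(image entries are skipped), and _onboard_vnf_package() maps each dict onto a
VnfPackageArtifactInfo row via _create_vnf_artifacts(). Values below are taken
from the sample manifest in the test data:

# Shape of the parsed artifact entries (illustration only).
vnf_artifacts = [
    {"Source": "Scripts/install.sh",
     "Algorithm": "SHA-256",
     "Hash": "27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b"
             "236ef7b6f50d6674b34f02a"},
    {"Source": "Files/kubernetes/deployment.yaml",
     "Algorithm": "SHA-256",
     "Hash": "e23cc3433835cea32ce790b4823313dc6d0744dce"
             "02e27b1b339c87ee993b8c2"},
]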

View File

@ -129,6 +129,20 @@ class VnfPackageVnfd(model_base.BASE, VnfPackageVnfdSoftDeleteMixin,
vnfd_version = sa.Column(sa.String(255), nullable=False)
class VnfPackageArtifactInfo(model_base.BASE, models.SoftDeleteMixin,
models.TimestampMixin, models_v1.HasId):
"""Contains all info about vnf artifacts."""
__tablename__ = 'vnf_artifacts'
package_uuid = sa.Column(sa.String(36),
sa.ForeignKey('vnf_packages.id'),
nullable=False)
artifact_path = sa.Column(sa.Text(), nullable=False)
algorithm = sa.Column(sa.String(64), nullable=False)
hash = sa.Column(sa.String(128), nullable=False)
_metadata = sa.Column(sa.JSON(), nullable=True)
class VnfPackage(model_base.BASE, models.SoftDeleteMixin,
models.TimestampMixin, models_v1.HasTenant,
models_v1.HasId):
@ -160,6 +174,12 @@ class VnfPackage(model_base.BASE, models.SoftDeleteMixin,
'VnfPackageVnfd.package_uuid,'
'VnfPackageVnfd.deleted == 0)')
vnf_artifacts = orm.relationship(
VnfPackageArtifactInfo,
primaryjoin='and_(VnfPackage.id == '
'VnfPackageArtifactInfo.package_uuid,'
'VnfPackageArtifactInfo.deleted == 0)')
@property
def metadetails(self):
return {m.key: m.value for m in self._metadata}

View File

@ -1 +1 @@
d2e39e01d540
e06fbdc90a32

View File

@ -0,0 +1,55 @@
# Copyright (C) 2020 FUJITSU DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""add db tables for add artifacts
Revision ID: e06fbdc90a32
Revises: d2e39e01d540
Create Date: 2020-09-17 02:52:41.435112
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'e06fbdc90a32'
down_revision = 'd2e39e01d540'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import Boolean
from tacker.db import types
def upgrade(active_plugins=None, options=None):
op.create_table(
'vnf_artifacts',
sa.Column('id', types.Uuid(length=36), nullable=False),
sa.Column('package_uuid', types.Uuid(length=36), nullable=False),
sa.Column('artifact_path', sa.Text(), nullable=False),
sa.Column('algorithm', sa.String(64), nullable=False),
sa.Column('hash', sa.String(128), nullable=False),
sa.Column('_metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('deleted_at', sa.DateTime(), nullable=True),
sa.Column('deleted', Boolean, default=False),
sa.PrimaryKeyConstraint('id'),
sa.ForeignKeyConstraint(['package_uuid'],
['vnf_packages.id'], ),
mysql_engine='InnoDB'
)

View File

@ -35,3 +35,4 @@ def register_all():
__import__('tacker.objects.instantiate_vnf_req')
__import__('tacker.objects.vnf_resources')
__import__('tacker.objects.terminate_vnf_req')
__import__('tacker.objects.vnf_artifact')

View File

@ -0,0 +1,208 @@
# Copyright 2020 NTT DATA.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_versionedobjects import base as ovoo_base
from tacker._i18n import _
from tacker.common import exceptions
from tacker.common import utils
from tacker.db import api as db_api
from tacker.db.db_sqlalchemy import api
from tacker.db.db_sqlalchemy import models
from tacker.objects import base
from tacker.objects import fields
LOG = logging.getLogger(__name__)
@db_api.context_manager.writer
def _vnf_artifacts_create(context, values):
vnf_artifacts = models.VnfPackageArtifactInfo()
vnf_artifacts.update(values)
vnf_artifacts.save(context.session)
return vnf_artifacts
@db_api.context_manager.reader
def _vnf_artifact_get_by_id(context, id):
query = api.model_query(context, models.VnfPackageArtifactInfo,
read_deleted="no").filter_by(id=id)
result = query.first()
if not result:
raise exceptions.VnfArtifactNotFound(id=id)
return result
@base.TackerObjectRegistry.register
class VnfPackageArtifactInfo(base.TackerObject, base.TackerPersistentObject):
ALL_ATTRIBUTES = {
"additionalArtifacts": {
'artifactPath': ('artifact_path', 'string',
'VnfPackageArtifactInfo'),
'metadata': ('_metadata', 'dict', 'VnfPackageArtifactInfo'),
"checksum": {
'hash': ('hash', 'string', 'VnfPackageArtifactInfo'),
'algorithm': ('algorithm', 'string', 'VnfPackageArtifactInfo')
}
}
}
FLATTEN_ATTRIBUTES = utils.flatten_dict(ALL_ATTRIBUTES.copy())
SIMPLE_ATTRIBUTES = ['artifactPath']
COMPLEX_ATTRIBUTES = [
'additionalArtifacts',
'additionalArtifacts/metadata',
'additionalArtifacts/checksum']
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.UUIDField(nullable=False),
'package_uuid': fields.UUIDField(nullable=False),
'artifact_path': fields.StringField(nullable=False),
'algorithm': fields.StringField(nullable=False),
'hash': fields.StringField(nullable=False),
'_metadata': fields.DictOfStringsField(nullable=True, default={})
}
@base.remotable_classmethod
def get_by_id(cls, context, id):
db_artifact = _vnf_artifact_get_by_id(context, id)
return cls._from_db_object(context, cls(), db_artifact)
@staticmethod
def _from_db_object(context, vnf_artifacts, db_vnf_artifacts):
for key in vnf_artifacts.fields:
setattr(vnf_artifacts, key, db_vnf_artifacts[key])
vnf_artifacts._context = context
vnf_artifacts.obj_reset_changes()
return vnf_artifacts
def obj_load_attr(self, attrname):
if not self._context:
raise exceptions.OrphanedObjectError(
method='obj_load_attr', objtype=self.obj_name())
if 'id' not in self:
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('attribute %s not lazy-loadable') % attrname)
LOG.debug("Lazy-loading '%(attr)s' on %(name)s id %(id)s",
{'attr': attrname,
'name': self.obj_name(),
'id': self.id,
})
self._obj_load_attr(attrname)
def _obj_load_attr(self, attrname):
if attrname in self.fields and attrname != 'id':
self._load_generic(attrname)
else:
# NOTE(nirajsingh): Raise error if non existing field is
# requested.
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('attribute %s not lazy-loadable') % attrname)
self.obj_reset_changes([attrname])
def _load_generic(self, attrname):
artifact = self.__class__.get_by_id(self._context,
id=self.id)
if attrname not in artifact:
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('loading %s requires recursion') % attrname)
for field in self.fields:
if field in artifact and field not in self:
setattr(self, field, getattr(artifact, field))
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exceptions.ObjectActionError(action='create',
reason=_('already created'))
updates = self.obj_get_changes()
db_vnf_artifacts = _vnf_artifacts_create(
self._context, updates)
self._from_db_object(self._context, self, db_vnf_artifacts)
def to_dict(self, include_fields=None):
response = dict()
fields = ['additionalArtifacts/%s' % attribute for attribute in
self.SIMPLE_ATTRIBUTES]
to_fields = set(fields).intersection(include_fields)
for field in to_fields:
display_field = field.split("/")[-1]
response[display_field] = getattr(
self, self.FLATTEN_ATTRIBUTES[field][0])
to_fields = set([key for key in self.FLATTEN_ATTRIBUTES.keys()
if key.startswith('additionalArtifacts/checksum')])
checksum = dict()
to_fields = to_fields.intersection(include_fields)
for field in to_fields:
display_field = field.split("/")[-1]
checksum[display_field] = getattr(
self, self.FLATTEN_ATTRIBUTES[field][0])
if checksum:
response.update({"checksum": checksum})
metadata = dict()
to_fields = set(['additionalArtifacts/metadata']).\
intersection(include_fields)
if to_fields:
metadata_json = \
getattr(self, self.
FLATTEN_ATTRIBUTES['additionalArtifacts/metadata'][0])
if metadata_json is not None:
metadata.update(metadata_json)
response.update({"metadata": metadata})
return response
@base.TackerObjectRegistry.register
class VnfPackageArtifactInfoList(ovoo_base.ObjectListBase, base.TackerObject):
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('VnfPackageArtifactInfo')
}
def to_dict(self, include_fields=None):
artifactList = list()
for artifact in self.objects:
arti_dict = artifact.to_dict(include_fields)
if arti_dict:
artifactList.append(arti_dict)
return artifactList
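
For illustration, VnfPackageArtifactInfo.to_dict() produces one
"additionalArtifacts" entry per artifact when all attribute names are
requested; a sketch of the serialised form (values from the sample manifest,
metadata defaults to an empty dict):

artifact_entry = {
    "artifactPath": "Scripts/install.sh",
    "checksum": {
        "algorithm": "SHA-256",
        "hash": "27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b"
                "236ef7b6f50d6674b34f02a",
    },
    "metadata": {},
}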

View File

@ -33,11 +33,15 @@ from tacker.db.db_sqlalchemy import models
from tacker import objects
from tacker.objects import base
from tacker.objects import fields
from tacker.objects import vnf_artifact
from tacker.objects import vnf_software_image
_NO_DATA_SENTINEL = object()
VNF_PACKAGE_OPTIONAL_ATTRS = ['vnf_deployment_flavours', 'vnfd']
VNF_PACKAGE_OPTIONAL_ATTRS = [
'vnf_deployment_flavours',
'vnfd',
'vnf_artifacts']
LOG = logging.getLogger(__name__)
@ -178,6 +182,9 @@ def _vnf_package_list_by_filters(context, read_deleted=None, filters=None):
query = query.join(models.VnfDeploymentFlavour).join(
models.VnfSoftwareImage)
if 'VnfPackageArtifactInfo' in filter_data:
query = query.join(models.VnfPackageArtifactInfo)
query = apply_filters(query, filters)
return query.all()
@ -233,6 +240,9 @@ def _destroy_vnf_package(context, package_uuid):
api.model_query(context, models.VnfDeploymentFlavour). \
filter_by(package_uuid=package_uuid). \
update(updated_values, synchronize_session=False)
api.model_query(context, models.VnfPackageArtifactInfo). \
filter_by(package_uuid=package_uuid). \
update(updated_values, synchronize_session=False)
api.model_query(context, models.VnfPackageVnfd). \
filter_by(package_uuid=package_uuid). \
soft_delete(synchronize_session=False)
@ -294,6 +304,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
}
ALL_ATTRIBUTES.update(vnf_software_image.VnfSoftwareImage.ALL_ATTRIBUTES)
ALL_ATTRIBUTES.update(vnf_artifact.VnfPackageArtifactInfo.ALL_ATTRIBUTES)
FLATTEN_ATTRIBUTES = utils.flatten_dict(ALL_ATTRIBUTES.copy())
@ -305,6 +316,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
COMPLEX_ATTRIBUTES = ["checksum", "userDefinedData"]
COMPLEX_ATTRIBUTES.extend(
vnf_software_image.VnfSoftwareImage.COMPLEX_ATTRIBUTES)
COMPLEX_ATTRIBUTES.extend(vnf_artifact.VnfPackageArtifactInfo.
COMPLEX_ATTRIBUTES)
# Version 1.1: Added 'size' to persist size of VnfPackage.
VERSION = '1.1'
@ -325,6 +338,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
'VnfDeploymentFlavoursList', nullable=True),
'vnfd': fields.ObjectField('VnfPackageVnfd', nullable=True),
'size': fields.IntegerField(nullable=False, default=0),
'vnf_artifacts': fields.ObjectField('VnfPackageArtifactInfoList',
nullable=True)
}
def __init__(self, context=None, **kwargs):
@ -375,6 +390,10 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
if 'vnfd' in expected_attrs:
vnf_package._load_vnfd(db_vnf_package.get('vnfd'))
if 'vnf_artifacts' in expected_attrs:
vnf_package._load_vnf_artifacts(
db_vnf_package.get('vnf_artifacts'))
def _load_vnf_deployment_flavours(self, db_flavours=_NO_DATA_SENTINEL):
if db_flavours is _NO_DATA_SENTINEL:
vnf_package = self.get_by_id(
@ -412,6 +431,25 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self._context, db_vnfd)
self.obj_reset_changes(['vnfd'])
def _load_vnf_artifacts(self, db_artifact=_NO_DATA_SENTINEL):
if db_artifact is _NO_DATA_SENTINEL:
vnf_package = self.get_by_id(
self._context, self.id,
expected_attrs=['vnf_artifacts'])
if 'vnf_artifacts' in vnf_package:
self.vnf_artifacts = vnf_package.vnf_artifacts
self.vnf_artifacts.obj_reset_changes(recursive=True)
self.obj_reset_changes(['vnf_artifacts'])
else:
self.vnf_artifacts = objects.\
VnfPackageArtifactInfoList(objects=[])
elif db_artifact:
self.vnf_artifacts = base.obj_make_list(
self._context, objects.VnfPackageArtifactInfoList(
self._context), objects.VnfPackageArtifactInfo,
db_artifact)
self.obj_reset_changes(['vnf_artifacts'])
def _load_generic(self, attrname):
vnf_package = self.__class__.get_by_id(self._context,
id=self.id,
@ -449,6 +487,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self._load_vnf_deployment_flavours()
elif attrname == 'vnfd':
self._load_vnfd()
elif attrname == 'vnf_artifacts':
self._load_vnf_artifacts()
elif attrname in self.fields and attrname != 'id':
self._load_generic(attrname)
else:
@ -472,7 +512,11 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self.id = updates['id']
for key in ['vnf_deployment_flavours']:
if key in updates:
if key in updates.keys():
updates.pop(key)
for key in ['vnf_artifacts']:
if key in updates.keys():
updates.pop(key)
user_data = updates.pop('user_data', None)
@ -499,7 +543,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
def save(self):
updates = self.tacker_obj_get_changes()
for key in ['vnf_deployment_flavours']:
if key in updates:
if key in updates.keys():
updates.pop(key)
db_vnf_package = _vnf_package_update(self._context,
@ -607,6 +651,12 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
if checksum:
vnf_package_response.update(checksum)
artifacts = self.vnf_artifacts.to_dict(
include_fields=include_fields)
if artifacts:
vnf_package_response.update(
{'additionalArtifacts': artifacts})
return vnf_package_response

View File

@ -115,6 +115,16 @@ rules = [
'path': '/vnf_packages/{vnf_package_id}/vnfd'
}
]),
policy.DocumentedRuleDefault(
name=VNFPKGM % 'fetch_artifact',
check_str=base.RULE_ADMIN_OR_OWNER,
description="Reads the content of the artifact within a VNF package.",
operations=[
{
'method': 'GET',
'path': '/vnf_packages/{vnfPkgId}/artifacts/{artifactPath}'
}
]),
]

View File

@ -39,3 +39,6 @@ LEASE_CHECK_EVENT_TIMEOUT = 300
LEASE_CHECK_SLEEP_TIME = 3
UUID = 'f26f181d-7891-4720-b022-b074ec1733ef'
INVALID_UUID = 'f181d-7891-4720-b022-b074ec3ef'
# artifact related
ARTIFACT_PATH = 'Scripts/install.sh'
INVALID_ARTIFACT_PATH = 'Fake_Scripts/fake_install.sh'

View File

@ -1,6 +1,4 @@
# TODO:Manually change from version 1.2 to 1.0
tosca_definitions_version: tosca_simple_yaml_1_2
#tosca_definitions_version: tosca_simple_yaml_1_2
description: ETSI NFV SOL 001 common types definitions version 2.6.1
metadata:
template_name: etsi_nfv_sol001_common_types

View File

@ -1,15 +1,12 @@
# TODO:Manually change from version 1.2 to 1.0
tosca_definitions_version: tosca_simple_yaml_1_2
#tosca_definitions_version: tosca_simple_yaml_1_2
description: ETSI NFV SOL 001 vnfd types definitions version 2.6.1
metadata:
template_name: etsi_nfv_sol001_vnfd_types
template_author: ETSI_NFV
template_version: 2.6.1
# TODO:Manually change from version 1.2 to 1.0
#imports:
# - https://forge.etsi.org/rep/nfv/sol001/raw/v2.6.1/etsi_nfv_sol001_common_types.yaml
imports:
- ./etsi_nfv_sol001_common_types.yaml
data_types:
tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements:

View File

@ -0,0 +1,48 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: curry-test001
namespace: curryns
spec:
replicas: 2
selector:
matchLabels:
app: webserver
template:
metadata:
labels:
app: webserver
scaling_name: SP1
spec:
containers:
- env:
- name: param0
valueFrom:
configMapKeyRef:
key: param0
name: curry-test001
- name: param1
valueFrom:
configMapKeyRef:
key: param1
name: curry-test001
image: celebdor/kuryr-demo
imagePullPolicy: IfNotPresent
name: web-server
ports:
- containerPort: 8080
resources:
limits:
cpu: 500m
memory: 512M
requests:
cpu: 500m
memory: 512M
volumeMounts:
- name: curry-claim-volume
mountPath: /data
volumes:
- name: curry-claim-volume
persistentVolumeClaim:
claimName: curry-pv-claim
terminationGracePeriodSeconds: 0

View File

@ -0,0 +1,3 @@
#!/bin/bash
echo "Hello, World!"

View File

@ -0,0 +1,10 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
ETSI-Entry-Manifest: manifest.mf
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,5 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
ETSI-Entry-Manifest: manifest.mf1

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,9 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-255
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,9 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,9 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Scripts/install.s
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,8 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Files/Scripts/install.sh
Content-Type: test-data
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,9 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Scripts/install.sh
Content-Type: test-data
Algorithm:
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros.img
Algorithm: SHA-512
Hash: <calculate here>
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

File diff suppressed because it is too large

View File

@ -0,0 +1,9 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,5 @@
TOSCA-Meta-File-Version: 1.0
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
ETSI-Entry-Manifest: manifest.mf

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,275 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: Simple deployment flavour for Sample VNF
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
- helloworld3_types.yaml
topology_template:
inputs:
descriptor_id:
type: string
descriptor_version:
type: string
provider:
type: string
product_name:
type: string
software_version:
type: string
vnfm_info:
type: list
entry_schema:
type: string
flavour_id:
type: string
flavour_description:
type: string
substitution_mappings:
node_type: company.provider.VNF
properties:
flavour_id: simple
requirements:
virtual_link_external: [ CP1, virtual_link ]
node_templates:
VNF:
type: company.provider.VNF
properties:
flavour_description: A simple flavour
interfaces:
Vnflcm:
# supporting only 'instantiate', 'terminate', 'modify'
# not supporting LCM script, supporting only default LCM
instantiate: []
instantiate_start: []
instantiate_end: []
terminate: []
terminate_start: []
terminate_end: []
modify_information: []
modify_information_start: []
modify_information_end: []
# change_flavour: []
# change_flavour_start: []
# change_flavour_end: []
# change_external_connectivity: []
# change_external_connectivity_start: []
# change_external_connectivity_end: []
# operate: []
# operate_start: []
# operate_end: []
# heal: []
# heal_start: []
# heal_end: []
# scale: []
# scale_start: []
# scale_end: []
# scale_to_level: []
# scale_to_level_start: []
# scale_to_level_end: []
VDU1:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU1
description: VDU1 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 1
sw_image_data:
name: Software of VDU1
version: '0.4.0'
checksum:
algorithm: sha-256
hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d
container_format: bare
disk_format: qcow2
min_disk: 1 GB
size: 1 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 1 GB
VDU2:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU2
description: VDU2 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 3
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 1 GB
requirements:
- virtual_storage: VirtualStorage
VirtualStorage:
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
properties:
virtual_block_storage_data:
size_of_storage: 30 GB
rdma_enabled: true
sw_image_data:
name: VirtualStorage
version: '0.4.0'
checksum:
algorithm: sha-256
hash: b9c3036539fd7a5f87a1bf38eb05fdde8b556a1a7e664dbeda90ed3cd74b4f9d
container_format: bare
disk_format: qcow2
min_disk: 2 GB
min_ram: 8192 MB
size: 2 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
CP1:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 0
vnic_type: direct-physical
requirements:
- virtual_binding: VDU1
#- virtual_link: # the target node is determined in the NSD
CP2:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 1
requirements:
- virtual_binding: VDU1
- virtual_link: internalVL2
CP3:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 2
requirements:
- virtual_binding: VDU2
- virtual_link: internalVL2
internalVL2:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 11.11.0.0/24
policies:
- scaling_aspects:
type: tosca.policies.nfv.ScalingAspects
properties:
aspects:
worker_instance:
name: worker_instance_aspect
description: worker_instance scaling aspect
max_scale_level: 2
step_deltas:
- delta_1
- VDU2_initial_delta:
type: tosca.policies.nfv.VduInitialDelta
properties:
initial_delta:
number_of_instances: 1
targets: [ VDU2 ]
- VDU2_scaling_aspect_deltas:
type: tosca.policies.nfv.VduScalingAspectDeltas
properties:
aspect: worker_instance
deltas:
delta_2:
number_of_instances: 1
targets: [ VDU2 ]
- instantiation_levels:
type: tosca.policies.nfv.InstantiationLevels
properties:
levels:
instantiation_level_1:
description: Smallest size
scale_info:
worker_instance:
scale_level: 0
instantiation_level_2:
description: Largest size
scale_info:
worker_instance:
scale_level: 2
default_level: instantiation_level_1
- VDU1_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 3
targets: [ VDU1 ]
- VDU2_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 1
targets: [ VDU2 ]
- internalVL2_instantiation_levels:
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
properties:
levels:
instantiation_level_1:
bitrate_requirements:
root: 1048576
leaf: 1048576
instantiation_level_2:
bitrate_requirements:
root: 1048576
leaf: 1048576
targets: [ internalVL2 ]

View File

@ -0,0 +1,31 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: Sample VNF
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
- helloworld3_types.yaml
- helloworld3_df_simple.yaml
topology_template:
inputs:
selected_flavour:
type: string
description: VNF deployment flavour selected by the consumer. It is provided in the API
node_templates:
VNF:
type: company.provider.VNF
properties:
flavour_id: { get_input: selected_flavour }
descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
provider: Company
product_name: Sample VNF
software_version: '1.0'
descriptor_version: '1.0'
vnfm_info:
- Tacker
requirements:
#- virtual_link_external # mapped in lower-level templates
#- virtual_link_internal # mapped in lower-level templates

View File

@ -0,0 +1,53 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: VNF type definition
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
node_types:
company.provider.VNF:
derived_from: tosca.nodes.nfv.VNF
properties:
descriptor_id:
type: string
constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 ] ]
default: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
descriptor_version:
type: string
constraints: [ valid_values: [ '1.0' ] ]
default: '1.0'
provider:
type: string
constraints: [ valid_values: [ 'Company' ] ]
default: 'Company'
product_name:
type: string
constraints: [ valid_values: [ 'Sample VNF' ] ]
default: 'Sample VNF'
software_version:
type: string
constraints: [ valid_values: [ '1.0' ] ]
default: '1.0'
vnfm_info:
type: list
entry_schema:
type: string
constraints: [ valid_values: [ Tacker ] ]
default: [ Tacker ]
flavour_id:
type: string
constraints: [ valid_values: [ simple ] ]
default: simple
flavour_description:
type: string
default: ""
requirements:
- virtual_link_external:
capability: tosca.capabilities.nfv.VirtualLinkable
- virtual_link_internal:
capability: tosca.capabilities.nfv.VirtualLinkable
interfaces:
Vnflcm:
type: tosca.interfaces.nfv.Vnflcm

View File

@ -0,0 +1,7 @@
TOSCA-Meta-File-Version: 1.0
Created-by: Tacker
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Files/images/cirros-0.4.0-x86_64-disk.img
Content-type: application/x-iso9066-image

View File

@ -1,7 +1,22 @@
TOSCA-Meta-File-Version: 1.0
Created-by: Hiroyuki JO
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
Name: Files/images/cirros-0.4.0-x86_64-disk.img
Content-type: application/x-iso9066-image
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Name: Files/kubernetes/deployment.yaml
Content-Type: test-data
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -1,7 +1,8 @@
TOSCA-Meta-File-Version: 1.0
Created-by: Hiroyuki JO
Created-by: dummy_user
CSAR-Version: 1.1
Entry-Definitions: Definitions/helloworld3_top.vnfd.yaml
ETSI-Entry-Manifest: manifest.mf
Name: Files/images/cirros-0.4.0-x86_64-disk.img
Content-type: application/x-iso9066-image

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 9a318acd9d049bbd6a8c662b18be95414b3b1f2e3d27e38e59bf99347193ac2831220aa54296ee35aee8f53c399000f095c8eca7eb611c5b2c83eeb7c30834a8
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros-0.4.0-x86_64-disk.img
Algorithm: SHA-512
Hash: 9a318acd9d049bbd6a8c662b18be95414b3b1f2e3d27e38e59bf99347193ac2831220aa54296ee35aee8f53c399000f095c8eca7eb611c5b2c83eeb7c30834a8
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2

File diff suppressed because it is too large

View File

@ -0,0 +1,202 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: ETSI NFV SOL 001 common types definitions version 2.6.1
metadata:
template_name: etsi_nfv_sol001_common_types
template_author: ETSI_NFV
template_version: 2.6.1
data_types:
tosca.datatypes.nfv.L2AddressData:
derived_from: tosca.datatypes.Root
description: Describes the information on the MAC addresses to be assigned to a connection point.
properties:
mac_address_assignment:
type: boolean
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
required: true
tosca.datatypes.nfv.L3AddressData:
derived_from: tosca.datatypes.Root
description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
properties:
ip_address_assignment:
type: boolean
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
required: true
floating_ip_activated:
type: boolean
description: Specifies if the floating IP scheme is activated on the Connection Point or not
required: true
ip_address_type:
type: string
description: Defines address type. The address type should be aligned with the address type supported by the layer_protocols properties of the parent VnfExtCp
required: false
constraints:
- valid_values: [ ipv4, ipv6 ]
number_of_ip_address:
type: integer
description: Minimum number of IP addresses to be assigned
required: false
constraints:
- greater_than: 0
tosca.datatypes.nfv.AddressData:
derived_from: tosca.datatypes.Root
description: Describes information about the addressing scheme and parameters applicable to a CP
properties:
address_type:
type: string
description: Describes the type of the address to be assigned to a connection point. The content type shall be aligned with the address type supported by the layerProtocol property of the connection point
required: true
constraints:
- valid_values: [ mac_address, ip_address ]
l2_address_data:
type: tosca.datatypes.nfv.L2AddressData
description: Provides the information on the MAC addresses to be assigned to a connection point.
required: false
l3_address_data:
type: tosca.datatypes.nfv.L3AddressData
description: Provides the information on the IP addresses to be assigned to a connection point
required: false
tosca.datatypes.nfv.ConnectivityType:
derived_from: tosca.datatypes.Root
description: describes additional connectivity information of a virtualLink
properties:
layer_protocols:
type: list
description: Identifies the protocol a virtualLink gives access to (ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire).The top layer protocol of the virtualLink protocol stack shall always be provided. The lower layer protocols may be included when there are specific requirements on these layers.
required: true
entry_schema:
type: string
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
flow_pattern:
type: string
description: Identifies the flow pattern of the connectivity
required: false
constraints:
- valid_values: [ line, tree, mesh ]
tosca.datatypes.nfv.LinkBitrateRequirements:
derived_from: tosca.datatypes.Root
description: describes the requirements in terms of bitrate for a virtual link
properties:
root:
type: integer # in bits per second
description: Specifies the throughput requirement in bits per second of the link (e.g. bitrate of E-Line, root bitrate of E-Tree, aggregate capacity of E-LAN).
required: true
constraints:
- greater_or_equal: 0
leaf:
type: integer # in bits per second
description: Specifies the throughput requirement in bits per second of leaf connections to the link when applicable to the connectivity type (e.g. for E-Tree and E LAN branches).
required: false
constraints:
- greater_or_equal: 0
tosca.datatypes.nfv.CpProtocolData:
derived_from: tosca.datatypes.Root
description: Describes and associates the protocol layer that a CP uses together with other protocol and connection point information
properties:
associated_layer_protocol:
type: string
required: true
description: One of the values of the property layer_protocols of the CP
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
address_data:
type: list
description: Provides information on the addresses to be assigned to the CP
entry_schema:
type: tosca.datatypes.nfv.AddressData
required: false
tosca.datatypes.nfv.VnfProfile:
derived_from: tosca.datatypes.Root
description: describes a profile for instantiating VNFs of a particular NS DF according to a specific VNFD and VNF DF.
properties:
instantiation_level:
type: string
description: Identifier of the instantiation level of the VNF DF to be used for instantiation. If not present, the default instantiation level as declared in the VNFD shall be used.
required: false
min_number_of_instances:
type: integer
description: Minimum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
required: true
constraints:
- greater_or_equal: 0
max_number_of_instances:
type: integer
description: Maximum number of instances of the VNF based on this VNFD that is permitted to exist for this VnfProfile.
required: true
constraints:
- greater_or_equal: 0
tosca.datatypes.nfv.Qos:
derived_from: tosca.datatypes.Root
description: describes QoS data for a given VL used in a VNF deployment flavour
properties:
latency:
type: scalar-unit.time #Number
description: Specifies the maximum latency
required: true
constraints:
- greater_than: 0 s
packet_delay_variation:
type: scalar-unit.time #Number
description: Specifies the maximum jitter
required: true
constraints:
- greater_or_equal: 0 s
packet_loss_ratio:
type: float
description: Specifies the maximum packet loss ratio
required: false
constraints:
- in_range: [ 0.0, 1.0 ]
capability_types:
tosca.capabilities.nfv.VirtualLinkable:
derived_from: tosca.capabilities.Node
description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type
relationship_types:
tosca.relationships.nfv.VirtualLinksTo:
derived_from: tosca.relationships.DependsOn
description: Represents an association relationship between the VduCp and VnfVirtualLink node types
valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
node_types:
tosca.nodes.nfv.Cp:
derived_from: tosca.nodes.Root
description: Provides information regarding the purpose of the connection point
properties:
layer_protocols:
type: list
description: Identifies which protocol the connection point uses for connectivity purposes
required: true
entry_schema:
type: string
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
type: string
description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
required: false
constraints:
- valid_values: [ root, leaf ]
description:
type: string
description: Provides human-readable information on the purpose of the connection point
required: false
protocol:
type: list
description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
required: false
entry_schema:
type: tosca.datatypes.nfv.CpProtocolData
trunk_mode:
type: boolean
description: Provides information about whether the CP instantiated from this Cp is in Trunk mode (802.1Q or other), When operating in "trunk mode", the Cp is capable of carrying traffic for several VLANs. Absence of this property implies that trunkMode is not configured for the Cp i.e. It is equivalent to boolean value "false".
required: false

View File

@ -0,0 +1,308 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: Complex deployment flavour for Sample VNF
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
- sample_vnfd_types.yaml
topology_template:
inputs:
id:
type: string
vendor:
type: string
version:
type: version
descriptor_id:
type: string
descriptor_version:
type: string
provider:
type: string
product_name:
type: string
software_version:
type: string
vnfm_info:
type: list
entry_schema:
type: string
flavour_id:
type: string
flavour_description:
type: string
substitution_mappings:
node_type: company.provider.VNF
properties:
flavour_id: complex
requirements:
virtual_link_external: [ CP1, virtual_link ]
node_templates:
VNF:
type: company.provider.VNF
properties:
flavour_description: A complex flavour
interfaces:
Vnflcm:
# supporting only 'instantiate', 'terminate', 'modify'
# not supporting LCM script, supporting only default LCM
instantiate: []
instantiate_start: []
instantiate_end: []
terminate: []
terminate_start: []
terminate_end: []
modify_information: []
modify_information_start: []
modify_information_end: []
# change_flavour: []
# change_flavour_start: []
# change_flavour_end: []
# change_external_connectivity: []
# change_external_connectivity_start: []
# change_external_connectivity_end: []
# operate: []
# operate_start: []
# operate_end: []
heal: []
# heal_start: []
# heal_end: []
# scale: []
# scale_start: []
# scale_end: []
# scale_to_level: []
# scale_to_level_start: []
# scale_to_level_end: []
VDU1:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU1
description: VDU1 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 1
sw_image_data:
name: Software of VDU1
version: '0.4.0'
checksum:
algorithm: sha-512
hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
container_format: bare
disk_format: qcow2
min_disk: 1 GB
size: 1 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 3 GB
VDU2:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU2
description: VDU2 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 3
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 3 GB
requirements:
- virtual_storage: VirtualStorage
VirtualStorage:
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
properties:
virtual_block_storage_data:
size_of_storage: 3 GB
rdma_enabled: true
sw_image_data:
name: VirtualStorage
version: '0.4.0'
checksum:
algorithm: sha-512
hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
container_format: bare
disk_format: qcow2
min_disk: 2 GB
min_ram: 256 MB
size: 1 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
CP1:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 0
requirements:
- virtual_binding: VDU1
- virtual_link: internalVL1
CP2:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 1
requirements:
- virtual_binding: VDU1
- virtual_link: internalVL2
CP3:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 2
requirements:
- virtual_binding: VDU2
- virtual_link: internalVL1
CP4:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 3
requirements:
- virtual_binding: VDU2
- virtual_link: internalVL2
internalVL1:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 11.10.0.0/24
internalVL2:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 11.11.0.0/24
policies:
- scaling_aspects:
type: tosca.policies.nfv.ScalingAspects
properties:
aspects:
worker_instance:
name: worker_instance_aspect
description: worker_instance scaling aspect
max_scale_level: 2
step_deltas:
- delta_2
- VDU2_initial_delta:
type: tosca.policies.nfv.VduInitialDelta
properties:
initial_delta:
number_of_instances: 1
targets: [ VDU2 ]
- VDU2_scaling_aspect_deltas:
type: tosca.policies.nfv.VduScalingAspectDeltas
properties:
aspect: worker_instance
deltas:
delta_2:
number_of_instances: 1
targets: [ VDU2 ]
- instantiation_levels:
type: tosca.policies.nfv.InstantiationLevels
properties:
levels:
instantiation_level_1:
description: Smallest size
scale_info:
worker_instance:
scale_level: 0
instantiation_level_2:
description: Largest size
scale_info:
worker_instance:
scale_level: 2
default_level: instantiation_level_1
- VDU1_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 3
targets: [ VDU1 ]
- VDU2_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 1
targets: [ VDU2 ]
- internalVL2_instantiation_levels:
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
properties:
levels:
instantiation_level_1:
bitrate_requirements:
root: 1048576
leaf: 1048576
instantiation_level_2:
bitrate_requirements:
root: 1048576
leaf: 1048576
targets: [ internalVL2 ]

View File

@ -0,0 +1,308 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: Simple deployment flavour for Sample VNF
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
- sample_vnfd_types.yaml
topology_template:
inputs:
id:
type: string
vendor:
type: string
version:
type: version
descriptor_id:
type: string
descriptor_version:
type: string
provider:
type: string
product_name:
type: string
software_version:
type: string
vnfm_info:
type: list
entry_schema:
type: string
flavour_id:
type: string
flavour_description:
type: string
substitution_mappings:
node_type: company.provider.VNF
properties:
flavour_id: simple
requirements:
virtual_link_external: [ CP1, virtual_link ]
node_templates:
VNF:
type: company.provider.VNF
properties:
flavour_description: A simple flavour
interfaces:
Vnflcm:
# supporting only 'instantiate', 'terminate', 'modify'
# not supporting LCM script, supporting only default LCM
instantiate: []
instantiate_start: []
instantiate_end: []
terminate: []
terminate_start: []
terminate_end: []
modify_information: []
modify_information_start: []
modify_information_end: []
# change_flavour: []
# change_flavour_start: []
# change_flavour_end: []
# change_external_connectivity: []
# change_external_connectivity_start: []
# change_external_connectivity_end: []
# operate: []
# operate_start: []
# operate_end: []
heal: []
# heal_start: []
# heal_end: []
# scale: []
# scale_start: []
# scale_end: []
# scale_to_level: []
# scale_to_level_start: []
# scale_to_level_end: []
VDU1:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU1
description: VDU1 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 1
sw_image_data:
name: Software of VDU1
version: '0.4.0'
checksum:
algorithm: sha-512
hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
container_format: bare
disk_format: qcow2
min_disk: 1 GB
size: 1 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 3 GB
VDU2:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU2
description: VDU2 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 3
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 3 GB
requirements:
- virtual_storage: VirtualStorage
VirtualStorage:
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
properties:
virtual_block_storage_data:
size_of_storage: 3 GB
rdma_enabled: true
sw_image_data:
name: VirtualStorage
version: '0.4.0'
checksum:
algorithm: sha-512
hash: 6513f21e44aa3da349f248188a44bc304a3653a04122d8fb4535423c8e1d14cd6a153f735bb0982e2161b5b5186106570c17a9e58b64dd39390617cd5a350f78
container_format: bare
disk_format: qcow2
min_disk: 2 GB
min_ram: 256 MB
size: 1 GB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: ../Files/images/cirros-0.4.0-x86_64-disk.img
CP1:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 0
requirements:
- virtual_binding: VDU1
- virtual_link: internalVL1
CP2:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 1
requirements:
- virtual_binding: VDU1
- virtual_link: internalVL2
CP3:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 2
requirements:
- virtual_binding: VDU2
- virtual_link: internalVL1
CP4:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 3
requirements:
- virtual_binding: VDU2
- virtual_link: internalVL2
internalVL1:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 11.10.0.0/24
internalVL2:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 11.11.0.0/24
policies:
- scaling_aspects:
type: tosca.policies.nfv.ScalingAspects
properties:
aspects:
worker_instance:
name: worker_instance_aspect
description: worker_instance scaling aspect
max_scale_level: 2
step_deltas:
- delta_2
- VDU2_initial_delta:
type: tosca.policies.nfv.VduInitialDelta
properties:
initial_delta:
number_of_instances: 1
targets: [ VDU2 ]
- VDU2_scaling_aspect_deltas:
type: tosca.policies.nfv.VduScalingAspectDeltas
properties:
aspect: worker_instance
deltas:
delta_2:
number_of_instances: 1
targets: [ VDU2 ]
- instantiation_levels:
type: tosca.policies.nfv.InstantiationLevels
properties:
levels:
instantiation_level_1:
description: Smallest size
scale_info:
worker_instance:
scale_level: 0
instantiation_level_2:
description: Largest size
scale_info:
worker_instance:
scale_level: 2
default_level: instantiation_level_1
- VDU1_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 3
targets: [ VDU1 ]
- VDU2_instantiation_levels:
type: tosca.policies.nfv.VduInstantiationLevels
properties:
levels:
instantiation_level_1:
number_of_instances: 1
instantiation_level_2:
number_of_instances: 1
targets: [ VDU2 ]
- internalVL2_instantiation_levels:
type: tosca.policies.nfv.VirtualLinkInstantiationLevels
properties:
levels:
instantiation_level_1:
bitrate_requirements:
root: 1048576
leaf: 1048576
instantiation_level_2:
bitrate_requirements:
root: 1048576
leaf: 1048576
targets: [ internalVL2 ]

View File

@ -0,0 +1,32 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: Sample VNF of Company Provider.
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
- sample_vnfd_types.yaml
- sample_vnfd_df_simple_int.yaml
- sample_vnfd_df_complex_int.yaml
topology_template:
inputs:
selected_flavour:
type: string
description: VNF deployment flavour selected by the consumer. It is provided in the API
node_templates:
VNF:
type: company.provider.VNF
properties:
flavour_id: { get_input: selected_flavour }
descriptor_id: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
provider: Company Provider
product_name: Sample VNF
software_version: '1.0'
descriptor_version: '1.0'
vnfm_info:
- Tacker
requirements:
#- virtual_link_external # mapped in lower-level templates
#- virtual_link_internal # mapped in lower-level templates

View File

@ -0,0 +1,65 @@
tosca_definitions_version: tosca_simple_yaml_1_2
description: company.provider.VNF type definition
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
node_types:
company.provider.VNF:
derived_from: tosca.nodes.nfv.VNF
properties:
id:
type: string
description: ID of this VNF
default: vnf_id
vendor:
type: string
description: name of the vendor who generates this VNF
default: vendor
version:
type: version
description: version of the software for this VNF
default: 1.0
descriptor_id:
type: string
constraints: [ valid_values: [ b1bb0ce7-ebca-4fa7-95ed-4840d70a1177 ] ]
default: b1bb0ce7-ebca-4fa7-95ed-4840d70a1177
descriptor_version:
type: string
constraints: [ valid_values: [ '1.0' ] ]
default: '1.0'
provider:
type: string
constraints: [ valid_values: [ 'Company Provider' ] ]
default: 'Company Provider'
product_name:
type: string
constraints: [ valid_values: [ 'Sample VNF' ] ]
default: 'Sample VNF'
software_version:
type: string
constraints: [ valid_values: [ '1.0' ] ]
default: '1.0'
vnfm_info:
type: list
entry_schema:
type: string
constraints: [ valid_values: [ Tacker ] ]
default: [ Tacker ]
flavour_id:
type: string
constraints: [ valid_values: [ simple, complex ] ]
default: simple
flavour_description:
type: string
default: "This is the default flavour description"
requirements:
- virtual_link_external:
capability: tosca.capabilities.nfv.VirtualLinkable
- virtual_link_internal:
capability: tosca.capabilities.nfv.VirtualLinkable
interfaces:
Vnflcm:
type: tosca.interfaces.nfv.Vnflcm

View File

@ -0,0 +1 @@
#### THIS IS A DUMMY FILE TO SAVE FILE SIZE #####

View File

@ -0,0 +1 @@
#### THIS IS A DUMMY FILE TO SAVE FILE SIZE #####

View File

@ -0,0 +1,48 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: curry-test001
namespace: curryns
spec:
replicas: 2
selector:
matchLabels:
app: webserver
template:
metadata:
labels:
app: webserver
scaling_name: SP1
spec:
containers:
- env:
- name: param0
valueFrom:
configMapKeyRef:
key: param0
name: curry-test001
- name: param1
valueFrom:
configMapKeyRef:
key: param1
name: curry-test001
image: celebdor/kuryr-demo
imagePullPolicy: IfNotPresent
name: web-server
ports:
- containerPort: 8080
resources:
limits:
cpu: 500m
memory: 512M
requests:
cpu: 500m
memory: 512M
volumeMounts:
- name: curry-claim-volume
mountPath: /data
volumes:
- name: curry-claim-volume
persistentVolumeClaim:
claimName: curry-pv-claim
terminationGracePeriodSeconds: 0

View File

@ -0,0 +1,3 @@
#!/bin/bash
echo "Hello, World!"

View File

@ -0,0 +1,10 @@
TOSCA-Meta-File-Version: 1.0
Created-by: Tacker
CSAR-Version: 1.1
Entry-Definitions: Definitions/sample_vnfd_top.yaml
ETSI-Entry-Manifest: manifest.mf
Name: Scripts/install.sh
Content-Type: test-data
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a

View File

@ -0,0 +1,11 @@
Source: Files/images/cirros.img
Algorithm: SHA-512
Hash: 9a318acd9d049bbd6a8c662b18be95414b3b1f2e3d27e38e59bf99347193ac2831220aa54296ee35aee8f53c399000f095c8eca7eb611c5b2c83eeb7c30834a8
Source: Scripts/install.sh
Algorithm: SHA-256
Hash: 27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef7b6f50d6674b34f02a
Source: Files/kubernetes/deployment.yaml
Algorithm: SHA-256
Hash: e23cc3433835cea32ce790b4823313dc6d0744dce02e27b1b339c87ee993b8c2
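
The TOSCA.meta block and the manifest entries above are the inputs consumed by the new artifact validation: every Name/Source is expected to exist inside the CSAR, and its content hash must match the declared Algorithm/Hash pair (the unit tests below exercise the failure cases for a bad hash, a bad algorithm, a bad source path, and missing keys). As a rough standalone sketch of that kind of check, assuming blank-line-separated entry blocks as the test utilities' re.split(b'\n\n+') suggests, and with helper names that are illustrative rather than Tacker's actual csar_utils code:

# Minimal sketch only; not Tacker's csar_utils implementation.
import hashlib
import os
import re

def verify_artifact_entries(manifest_text, csar_root):
    """Check each Source/Algorithm/Hash block against the extracted CSAR."""
    for block in re.split(r'\n\s*\n', manifest_text.strip()):
        entry = dict(line.split(':', 1)
                     for line in block.splitlines() if ':' in line)
        entry = {k.strip(): v.strip() for k, v in entry.items()}
        if 'Hash' not in entry or 'Algorithm' not in entry:
            continue  # metadata header block, not an artifact entry
        path = entry.get('Source') or entry.get('Name')
        algorithm = entry['Algorithm'].lower().replace('-', '')  # sha-256 -> sha256
        hasher = hashlib.new(algorithm)
        with open(os.path.join(csar_root, path), 'rb') as f:
            hasher.update(f.read())
        if hasher.hexdigest() != entry['Hash']:
            raise ValueError('hash mismatch for artifact %s' % path)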

View File

@ -30,7 +30,7 @@ topology_template:
type: string
substitution_mappings:
node_type: ntt.nslab.VNF
node_type: company.provider.VNF
properties:
flavour_id: simple
requirements:
@ -38,7 +38,7 @@ topology_template:
node_templates:
VNF:
type: ntt.nslab.VNF
type: company.provider.VNF
properties:
flavour_description: A simple flavour
interfaces:

View File

@ -62,6 +62,8 @@ class SessionClient(adapter.Adapter):
body = response.text
if body and response.headers['Content-Type'] == 'text/plain':
return body
elif body and response.headers['Content-Type'] == 'text/x-sh':
return body
elif body:
return jsonutils.loads(body)
else:

View File

@ -52,8 +52,14 @@ class VnfPackageTest(base.BaseTackerTest):
resp, self.package2 = self.http_client.do_request(show_url, "GET")
self.assertEqual(200, resp.status_code)
self.package_id3 = self._create_and_upload_vnf("vnfpkgm3")
show_url = self.base_url + "/" + self.package_id3
resp, self.package3 = self.http_client.do_request(show_url, "GET")
self.assertEqual(200, resp.status_code)
def tearDown(self):
for package_id in [self.package_id1, self.package_id2]:
for package_id in [self.package_id1, self.package_id2,
self.package_id3]:
self._disable_operational_state(package_id)
self._delete_vnf_package(package_id)
self._wait_for_delete(package_id)
@ -112,18 +118,19 @@ class VnfPackageTest(base.BaseTackerTest):
"""Creates and deletes a vnf package."""
# Create vnf package
body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})
vnf_package = self._create_vnf_package(body)
package_uuid = vnf_package['id']
vnf_package_id = self._create_and_upload_vnf('vnfpkgm1')
# show vnf package
show_url = self.base_url + "/" + package_uuid
show_url = self.base_url + "/" + vnf_package_id
resp, body = self.http_client.do_request(show_url, "GET")
self.assertEqual(200, resp.status_code)
# update vnf package
self._disable_operational_state(vnf_package_id)
# Delete vnf package
self._delete_vnf_package(package_uuid)
self._wait_for_delete(package_uuid)
self._delete_vnf_package(vnf_package_id)
self._wait_for_delete(vnf_package_id)
# show vnf package should fail as it's deleted
resp, body = self.http_client.do_request(show_url, "GET")
@ -159,7 +166,13 @@ class VnfPackageTest(base.BaseTackerTest):
body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})
vnf_package = self._create_vnf_package(body)
csar_dir = self._get_csar_dir_path(sample_name)
file_path, vnfd_id = utils.create_csar_with_unique_vnfd_id(csar_dir)
if os.path.exists(os.path.join(csar_dir, 'TOSCA-Metadata')) and \
sample_name != 'vnfpkgm2':
file_path = utils.create_csar_with_unique_artifact(
csar_dir)
else:
file_path, vnfd_id = utils.create_csar_with_unique_vnfd_id(
csar_dir)
self.addCleanup(os.remove, file_path)
with open(file_path, 'rb') as file_object:
@ -246,7 +259,7 @@ class VnfPackageTest(base.BaseTackerTest):
"key1": "changed_val1", "new_key": "new_val"}}
csar_dir = self._get_csar_dir_path("vnfpkgm1")
file_path, vnfd_id = utils.create_csar_with_unique_vnfd_id(csar_dir)
file_path = utils.create_csar_with_unique_artifact(csar_dir)
self.addCleanup(os.remove, file_path)
with open(file_path, 'rb') as file_object:
resp, resp_body = self.http_client.do_request(
@ -273,12 +286,14 @@ class VnfPackageTest(base.BaseTackerTest):
filter_expr = {
'filter': "(gt,softwareImages/minDisk,7);"
"(eq,onboardingState,ONBOARDED);"
"(eq,softwareImages/checksum/algorithm,'sha-512')"
"(eq,softwareImages/checksum/algorithm,'sha-512');"
"(eq,additionalArtifacts/checksum/algorithm,'sha-256')"
}
filter_url = self.base_url + "?" + urllib.parse.urlencode(filter_expr)
resp, body = self.http_client.do_request(filter_url, "GET")
package = deepcopy(self.package2)
for attr in ['softwareImages', 'checksum', 'userDefinedData']:
for attr in ['softwareImages', 'checksum', 'userDefinedData',
'additionalArtifacts']:
package.pop(attr, None)
expected_result = [package]
self.assertEqual(expected_result, body)
@ -307,19 +322,24 @@ class VnfPackageTest(base.BaseTackerTest):
filter_url = self.base_url + "?" + urllib.parse.urlencode(filter_expr)
resp, body = self.http_client.do_request(filter_url, "GET")
package2 = deepcopy(self.package2)
for attr in ['softwareImages', 'checksum', 'userDefinedData']:
for attr in ['softwareImages', 'checksum', 'userDefinedData',
'additionalArtifacts']:
package2.pop(attr, None)
expected_result = [package2]
self.assertEqual(expected_result, body)
def test_index_attribute_selector_exclude_fields(self):
filter_expr = {'filter': '(eq,id,%s)' % self.package_id2,
'exclude_fields': 'checksum,softwareImages/checksum'}
filter_expr = {
'filter': '(eq,id,%s)' % self.package_id2,
'exclude_fields': 'checksum,softwareImages/checksum,'
'additionalArtifacts/checksum'}
filter_url = self.base_url + "?" + urllib.parse.urlencode(filter_expr)
resp, body = self.http_client.do_request(filter_url, "GET")
package2 = deepcopy(self.package2)
for software_image in package2['softwareImages']:
software_image.pop('checksum', None)
for artifact in package2['additionalArtifacts']:
artifact.pop('checksum', None)
package2.pop('checksum', None)
expected_result = [package2]
self.assertEqual(expected_result, body)
@ -328,7 +348,8 @@ class VnfPackageTest(base.BaseTackerTest):
filter_expr = {'filter': '(eq,id,%s)' % self.package_id1,
'fields': 'softwareImages/checksum/hash,'
'softwareImages/containerFormat,softwareImages/name,'
'userDefinedData'}
'userDefinedData,additionalArtifacts/checksum/hash,'
'additionalArtifacts/artifactPath'}
filter_url = self.base_url + "?" + urllib.parse.urlencode(filter_expr)
resp, body = self.http_client.do_request(filter_url, "GET")
package1 = deepcopy(self.package1)
@ -340,6 +361,9 @@ class VnfPackageTest(base.BaseTackerTest):
'minDisk', 'minRam', 'provider', 'size', 'userMetadata',
'version']:
software_image.pop(attr, None)
for artifact in package1['additionalArtifacts']:
artifact['checksum'].pop('algorithm', None)
artifact.pop('metadata', None)
package1.pop('checksum', None)
expected_result = [package1]
self.assertEqual(expected_result, body)
@ -413,7 +437,7 @@ class VnfPackageTest(base.BaseTackerTest):
id=self.package_id1, base_path=self.base_url),
"GET", body={}, headers={})
self.assertEqual(200, response[0].status_code)
self.assertEqual('12802866', response[0].headers['Content-Length'])
self.assertEqual('12804503', response[0].headers['Content-Length'])
def test_fetch_vnf_package_content_combined_download(self):
"""Combine two partial downloads using 'Range' requests for csar zip"""
@ -434,7 +458,7 @@ class VnfPackageTest(base.BaseTackerTest):
zipf.writestr(file_path, data)
# Partial download 2
range_ = 'bytes=11-12802866'
range_ = 'bytes=11-12804503'
headers = {'Range': range_}
response_2 = self.http_client.do_request(
'{base_path}/{id}/package_content'.format(
@ -447,5 +471,51 @@ class VnfPackageTest(base.BaseTackerTest):
size_2 = int(response_2[0].headers['Content-Length'])
total_size = size_1 + size_2
self.assertEqual(True, zipfile.is_zipfile(zip_file_path))
self.assertEqual(12802866, total_size)
self.assertEqual(12804503, total_size)
zip_file_path.close()
def test_fetch_vnf_package_artifacts(self):
# run download api
response1 = self.http_client.do_request(
'{base_path}/{id}/artifacts/{artifact_path}'.format(
base_path=self.base_url, id=self.package_id1,
artifact_path='Scripts/install.sh'),
"GET", body={}, headers={})
response2 = self.http_client.do_request(
'{base_path}/{id}/artifacts/{artifact_path}'.format(
base_path=self.base_url, id=self.package_id2,
artifact_path='Scripts/install.sh'),
"GET", body={}, headers={})
response3 = self.http_client.do_request(
'{base_path}/{id}/artifacts/{artifact_path}'.format(
base_path=self.base_url, id=self.package_id3,
artifact_path='Scripts/install.sh'),
"GET", body={}, headers={})
# verification
self.assertEqual(200, response1[0].status_code)
self.assertEqual('33', response1[0].headers['Content-Length'])
self.assertIsNotNone(response1[1])
self.assertEqual(200, response2[0].status_code)
self.assertEqual('33', response2[0].headers['Content-Length'])
self.assertIsNotNone(response2[1])
self.assertEqual(200, response3[0].status_code)
self.assertEqual('33', response3[0].headers['Content-Length'])
self.assertIsNotNone(response3[1])
def test_fetch_vnf_package_artifacts_partial_download_using_range(self):
# get range
range_ = 'bytes=3-8'
# get headers
headers = {'Range': range_}
# request download api
response = self.http_client.do_request(
'{base_path}/{id}/artifacts/{artifact_path}'.format(
base_path=self.base_url, id=self.package_id1,
artifact_path='Scripts/install.sh'),
"GET", body={}, headers=headers)
# verification
self.assertEqual(206, response[0].status_code)
self.assertEqual('6', response[0].headers['Content-Length'])
self.assertIsNotNone(response[1])
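
Outside the functional test harness, the same ranged artifact fetch can be driven by any HTTP client. A hedged sketch using python-requests follows; the endpoint root, package UUID and token are placeholders, not real deployment values:

# Illustrative client call; URL, UUID and token below are placeholders.
import requests

BASE = 'http://tacker-host:9890/vnfpkgm/v1/vnf_packages'  # assumed API root
PKG_ID = 'replace-with-package-uuid'
ARTIFACT = 'Scripts/install.sh'

resp = requests.get(
    '%s/%s/artifacts/%s' % (BASE, PKG_ID, ARTIFACT),
    headers={'Range': 'bytes=3-8',
             'X-Auth-Token': 'replace-with-token'})  # hypothetical auth token
assert resp.status_code == 206                # partial content, as asserted above
assert resp.headers['Content-Length'] == '6'  # six bytes of install.sh
print(resp.content)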

View File

@ -36,9 +36,10 @@ class TestCSARUtils(testtools.TestCase):
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data(self, mock_extract_csar_zip_file):
file_path, _ = utils.create_csar_with_unique_vnfd_id(
'./tacker/tests/etc/samples/etsi/nfv/vnfpkgm1')
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnfpkg_tosca_vnfd')
self.addCleanup(os.remove, file_path)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
@ -52,7 +53,7 @@ class TestCSARUtils(testtools.TestCase):
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnfpkg_no_meta_single_vnfd')
self.addCleanup(os.remove, file_path)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
@ -89,6 +90,193 @@ class TestCSARUtils(testtools.TestCase):
zcsar.close()
return tempname
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_in_meta_and_manifest_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest')
self.addCleanup(os.remove, file_path)
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
self.assertEqual(flavours[0]['flavour_id'], 'simple')
self.assertIsNotNone(flavours[0]['sw_images'])
self.assertIsNotNone(vnf_artifacts)
self.assertIsNotNone(vnf_artifacts[0]['Source'])
self.assertIsNotNone(vnf_artifacts[0]['Hash'])
for item in vnf_artifacts:
flag = item.get('Source').lower().endswith('.img')
self.assertEqual(flag, False)
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_with_single_manifest_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_manifest')
self.addCleanup(os.remove, file_path)
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
self.assertEqual(flavours[0]['flavour_id'], 'simple')
self.assertIsNotNone(flavours[0]['sw_images'])
self.assertIsNotNone(vnf_artifacts)
self.assertIsNotNone(vnf_artifacts[0]['Source'])
self.assertIsNotNone(vnf_artifacts[0]['Hash'])
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_with_single_meta_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_meta')
self.addCleanup(os.remove, file_path)
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
self.assertEqual(flavours[0]['flavour_id'], 'simple')
self.assertIsNotNone(flavours[0]['sw_images'])
self.assertIsNotNone(vnf_artifacts)
self.assertIsNotNone(vnf_artifacts[0]['Source'])
self.assertIsNotNone(vnf_artifacts[0]['Hash'])
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_meta_in_manifest_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_meta_in_manifest')
self.addCleanup(os.remove, file_path)
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
self.assertEqual(flavours[0]['flavour_id'], 'simple')
self.assertIsNotNone(flavours[0]['sw_images'])
self.assertIsNotNone(vnf_artifacts)
self.assertIsNotNone(vnf_artifacts[0]['Source'])
self.assertIsNotNone(vnf_artifacts[0]['Hash'])
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_false_mf_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_false')
self.addCleanup(os.remove, file_path)
manifest_path = 'manifest.mf1'
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
msg = (('The file "%(manifest)s" in the CSAR "%(csar)s" does not '
'contain valid manifest.') %
{'manifest': manifest_path, 'csar': file_path})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_false_mf_name_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_single_manifest_false_name')
self.addCleanup(os.remove, file_path)
manifest_path = 'VNF1.mf'
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
msg = (('The filename "%(manifest)s" is an invalid name.'
'The name must be the same as the main template '
'file name.') %
{'manifest': manifest_path, 'csar': file_path})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_false_hash_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_false_hash')
self.addCleanup(os.remove, file_path)
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
hash_code = '27bbdb25d8f4ed6d07d6f6581b86515e8b2f' \
'0059b236ef7b6f50d6674b34f02'
artifact_path = 'Scripts/install.sh'
msg = (('The hash "%(hash)s" of artifact file '
'"%(artifact)s" is an invalid value.') %
{'hash': hash_code, 'artifact': artifact_path})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_missing_key_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_missing_key')
self.addCleanup(os.remove, file_path)
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
key_name = sorted(['Algorithm'])
msg = (('One of the artifact information may not have '
'the key("%(key)s")') % {'key': key_name})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_missing_value_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_missing_value')
self.addCleanup(os.remove, file_path)
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
key_name = 'Algorithm'
msg = (('One of the artifact information may not have '
'the key value("%(key)s")') % {'key': key_name})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_false_source_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_false_source')
self.addCleanup(os.remove, file_path)
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
artifact_path = 'Scripts/install.s'
msg = (('The path("%(artifact_path)s") of '
'artifact Source is an invalid value.') %
{'artifact_path': artifact_path})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_false_algorithm_with_vnf_artifact(
self, mock_extract_csar_zip_file):
file_path = utils.create_csar_with_unique_artifact(
'./tacker/tests/etc/samples/etsi/nfv/'
'sample_vnf_package_csar_in_meta_and_manifest_false_algorithm')
self.addCleanup(os.remove, file_path)
exc = self.assertRaises(exceptions.InvalidCSAR,
csar_utils.load_csar_data,
self.context, constants.UUID, file_path)
algorithm = 'sha-255'
artifact_path = 'Scripts/install.sh'
msg = (('The algorithm("%(algorithm)s") of '
'artifact("%(artifact_path)s") is '
'an invalid value.') %
{'algorithm': algorithm,
'artifact_path': artifact_path})
self.assertEqual(msg, exc.format_message())
@mock.patch('tacker.common.csar_utils.extract_csar_zip_file')
def test_load_csar_data_without_instantiation_level(
self, mock_extract_csar_zip_file):
@ -205,7 +393,7 @@ class TestCSARUtils(testtools.TestCase):
self, mock_extract_csar_zip_file):
file_path = self._get_csar_zip_from_dir(
'csar_without_policies')
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, file_path)
self.assertIsNone(flavours[0].get('instantiation_levels'))
self.assertEqual(vnf_data['descriptor_version'], '1.0')
@ -231,7 +419,7 @@ class TestCSARUtils(testtools.TestCase):
"csar_with_short_notation_for_artifacts"
zip_name, uniqueid = utils.create_csar_with_unique_vnfd_id(file_path)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
self.context, constants.UUID, zip_name)
self.assertEqual(vnf_data['descriptor_version'], '1.0')
self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])

View File

@ -77,7 +77,7 @@ def make_vnfd_files_list(csar_path):
def create_fake_csar_dir(vnf_package_id, temp_dir,
csar_without_tosca_meta=False):
csar_dir = ('sample_vnfpkg_no_meta_single_vnfd' if csar_without_tosca_meta
else 'vnfpkgm1')
else 'sample_vnfpkg_tosca_vnfd')
fake_csar = os.path.join(temp_dir, vnf_package_id)
cfg.CONF.set_override('vnf_package_csar_path', temp_dir,
group='vnf_package')
@ -94,7 +94,8 @@ def get_expected_vnfd_data(zip_file=None):
else:
unique_name = str(uuid.uuid4())
csar_temp_dir = os.path.join('/tmp', unique_name)
utils.copy_csar_files(csar_temp_dir, 'vnfpkgm1', read_vnfd_only=True)
utils.copy_csar_files(csar_temp_dir, 'sample_vnfpkg_tosca_vnfd',
read_vnfd_only=True)
file_names = ['TOSCA-Metadata/TOSCA.meta',
'Definitions/etsi_nfv_sol001_vnfd_types.yaml',

View File

@ -91,7 +91,7 @@ class TestConductor(SqlTestCase):
def test_upload_vnf_package_content(self, mock_load_csar,
mock_load_csar_data,
mock_revert, mock_onboard):
mock_load_csar_data.return_value = (mock.ANY, mock.ANY)
mock_load_csar_data.return_value = (mock.ANY, mock.ANY, mock.ANY)
mock_load_csar.return_value = '/var/lib/tacker/5f5d99c6-844a-4c3' \
'1-9e6d-ab21b87dcfff.zip'
self.conductor.upload_vnf_package_content(
@ -110,7 +110,7 @@ class TestConductor(SqlTestCase):
mock_revert, mock_store,
mock_onboard):
address_information = "http://test.zip"
mock_load_csar_data.return_value = (mock.ANY, mock.ANY)
mock_load_csar_data.return_value = (mock.ANY, mock.ANY, mock.ANY)
mock_load_csar.return_value = '/var/lib/tacker/5f5d99c6-844a' \
'-4c31-9e6d-ab21b87dcfff.zip'
mock_store.return_value = 'location', 0, 'checksum',\
@ -159,7 +159,7 @@ class TestConductor(SqlTestCase):
cfg.CONF.set_override('vnf_package_csar_path', self.temp_dir,
group='vnf_package')
fake_csar_zip, _ = utils.create_csar_with_unique_vnfd_id(
'./tacker/tests/etc/samples/etsi/nfv/vnfpkgm1')
'./tacker/tests/etc/samples/etsi/nfv/sample_vnfpkg_tosca_vnfd')
mock_load_csar.return_value = fake_csar_zip
expected_data = fakes.get_expected_vnfd_data(zip_file=fake_csar_zip)
result = self.conductor.get_vnf_package_vnfd(self.context,

View File

@ -0,0 +1,84 @@
# Copyright 2020 NTT DATA.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.common import exceptions
from tacker import context
from tacker.objects import vnf_artifact
from tacker.objects import vnf_package
from tacker.tests.unit.db.base import SqlTestCase
from tacker.tests.unit.objects import fakes
from tacker.tests import uuidsentinel
class TestVnfPackageArtifact(SqlTestCase):
def setUp(self):
super(TestVnfPackageArtifact, self).setUp()
self.context = context.get_admin_context()
self.vnf_artifacts = self._create_vnf_artifact()
def test_vnf_package_artifact_create(self):
vnf_pack = vnf_package.VnfPackage(context=self.context,
**fakes.vnf_package_data)
vnf_pack.create()
vnf_pack_artifact_data = fakes.vnf_pack_artifact_data(vnf_pack.id)
result = vnf_artifact._vnf_artifacts_create(
self.context, vnf_pack_artifact_data)
self.assertTrue(result.id)
def _create_vnf_artifact(self):
vnf_pack = vnf_package.VnfPackage(context=self.context,
**fakes.vnf_package_data)
vnf_pack.create()
vnf_pack_artifact_data = fakes.vnf_pack_artifact_data(vnf_pack.id)
vnf_artifact_obj = vnf_artifact.VnfPackageArtifactInfo(
context=self.context, **vnf_pack_artifact_data)
vnf_artifact_obj.create()
self.assertEqual('scripts/install.sh', vnf_artifact_obj.artifact_path)
return vnf_artifact_obj
def test_get_by_id(self):
vnf_artifacts = vnf_artifact.VnfPackageArtifactInfo.get_by_id(
self.context, self.vnf_artifacts.id)
self.compare_obj(self.vnf_artifacts, vnf_artifacts)
def test_get_by_id_with_no_existing_id(self):
self.assertRaises(
exceptions.VnfArtifactNotFound,
vnf_artifact.VnfPackageArtifactInfo.get_by_id, self.context,
uuidsentinel.invalid_uuid)
def test_attribute_with_valid_data(self):
data = {'id': self.vnf_artifacts.id}
vnf_artifact_obj = vnf_artifact.VnfPackageArtifactInfo(
context=self.context, **data)
vnf_artifact_obj.obj_load_attr('artifact_path')
self.assertEqual('scripts/install.sh', vnf_artifact_obj.artifact_path)
def test_invalid_attribute(self):
self.assertRaises(exceptions.ObjectActionError,
self.vnf_artifacts.obj_load_attr, 'invalid')
def test_obj_load_attr_without_context(self):
data = {'id': self.vnf_artifacts.id}
vnf_artifact_obj = vnf_artifact.VnfPackageArtifactInfo(**data)
self.assertRaises(exceptions.OrphanedObjectError,
vnf_artifact_obj.obj_load_attr, 'artifact_path')
def test_obj_load_attr_without_id_in_object(self):
data = {'artifact_path': self.vnf_artifacts.artifact_path}
vnf_artifact_obj = vnf_artifact.VnfPackageArtifactInfo(
context=self.context, **data)
self.assertRaises(exceptions.ObjectActionError,
vnf_artifact_obj.obj_load_attr, 'artifact_path')

View File

@ -30,6 +30,7 @@ class TestVnfPackage(SqlTestCase):
self.context = context.get_admin_context()
self.vnf_package = self._create_vnf_package()
self.vnf_deployment_flavour = self._create_vnf_deployment_flavour()
self.vnf_artifacts = self._create_vnf_artifacts()
def _create_vnf_package(self):
vnfpkgm = objects.VnfPackage(context=self.context,
@ -45,6 +46,14 @@ class TestVnfPackage(SqlTestCase):
vnf_deployment_flavour.create()
return vnf_deployment_flavour
def _create_vnf_artifacts(self):
artifact_data = fake_data.vnf_artifacts
artifact_data.update({'package_uuid': self.vnf_package.id})
vnf_artifacts = objects.VnfPackageArtifactInfo(context=self.context,
**artifact_data)
vnf_artifacts.create()
return vnf_artifacts
def test_add_user_defined_data(self):
vnf_package_db = models.VnfPackage()
vnf_package_db.update(fakes.fake_vnf_package())
@ -57,9 +66,10 @@ class TestVnfPackage(SqlTestCase):
def test_vnf_package_get_by_id(self):
result = vnf_package._vnf_package_get_by_id(
self.context, self.vnf_package.id,
columns_to_join=['vnf_deployment_flavours'])
columns_to_join=['vnf_deployment_flavours', 'vnf_artifacts'])
self.assertEqual(self.vnf_package.id, result.id)
self.assertTrue(result.vnf_deployment_flavours)
self.assertTrue(result.vnf_artifacts)
def test_vnf_package_create(self):
result = vnf_package._vnf_package_create(self.context,
@ -68,7 +78,8 @@ class TestVnfPackage(SqlTestCase):
def test_vnf_package_list(self):
result = vnf_package._vnf_package_list(
self.context, columns_to_join=['vnf_deployment_flavours'])
self.context, columns_to_join=[
'vnf_deployment_flavours', 'vnf_artifacts'])
self.assertTrue(isinstance(result, list))
self.assertTrue(result)

View File

@ -81,6 +81,14 @@ vnf_deployment_flavour = {'flavour_id': 'simple',
2019, 8, 8, 0, 0, 0, tzinfo=iso8601.UTC),
}
vnf_artifacts = {
'artifact_path': 'scripts/install.sh',
'_metadata': {},
'algorithm': 'sha-256',
'hash': 'd0e7828293355a07c2dccaaa765c80b507e60e6167067c950dc2e6b0da0dbd8b',
'created_at': datetime.datetime(2020, 6, 29, 0, 0, 0, tzinfo=iso8601.UTC),
}
def get_vnf_package_vnfd_data(vnf_package_id, vnfd_id):
return {
@ -170,6 +178,17 @@ def vnf_pack_vnfd_data(vnf_pack_id):
}
def vnf_pack_artifact_data(vnf_pack_id):
return {
'package_uuid': vnf_pack_id,
'artifact_path': 'scripts/install.sh',
'algorithm': 'sha-256',
'hash': 'd0e7828293355a07c2dccaaa765c80b507e'
'60e6167067c950dc2e6b0da0dbd8b',
'_metadata': {}
}
ip_address = [{
'type': 'IPV4',
'is_dynamic': True

View File

@ -87,7 +87,8 @@ class TestVnfPackage(SqlTestCase):
self.vnf_package.id,
expected_attrs=None)
self.compare_obj(self.vnf_package, vnfpkgm,
allow_missing=['vnf_deployment_flavours'])
allow_missing=['vnf_deployment_flavours',
'vnf_artifacts'])
def test_get_by_id_with_no_existing_id(self):
self.assertRaises(

View File

@ -25,6 +25,7 @@ import yaml
from tacker.api.vnfpkgm.v1.router import VnfpkgmAPIRouter
from tacker import context
from tacker.objects import vnf_artifact as vnf_artifact_obj
from tacker.objects import vnf_deployment_flavour as vnf_deployment_flavour_obj
from tacker.objects import vnf_package as vnf_package_obj
from tacker.objects import vnf_package_vnfd as vnf_package_vnfd_obj
@ -75,6 +76,15 @@ VNFPACKAGE_RESPONSE = {
'userMetadata': {'key3': 'value3', 'key4': 'value4'},
'version': '11.22.33'
}],
'additionalArtifacts': [{
'artifactPath': 'Scripts/install.sh',
'metadata': {},
'checksum': {
'algorithm': 'SHA-256',
'hash': '27bbdb25d8f4ed6d07d6f6581b86515e8b2f005'
'9b236ef7b6f50d6674b34f02a'
}
}],
}
VNFPACKAGE_INDEX_RESPONSE = [VNFPACKAGE_RESPONSE]
@ -170,6 +180,32 @@ def _return_vnfd(updates=None):
return vnf_package_vnfd_obj.VnfPackageVnfd(**vnfd)
def _fake_artifact(updates=None):
artifact = {
'id': uuidsentinel.vnf_artifact_id,
'package_uuid': 'f26f181d-7891-4720-b022-b074ec1733ef',
'artifact_path': 'Scripts/install.sh',
'metadata': {},
'algorithm': 'SHA-256',
'hash': '27bbdb25d8f4ed6d07d6f6581b86515e8b2f0059b236ef'
'7b6f50d6674b34f02a'}
if updates:
artifact.update(updates)
return artifact
def _return_artifact(update=None):
artifact = _fake_artifact(update)
return vnf_artifact_obj.VnfPackageArtifactInfo(**artifact)
def _return_artifact_list(update=None):
artifact_obj = _return_artifact(update)
artifact_list = vnf_artifact_obj.VnfPackageArtifactInfoList()
artifact_list.objects = [artifact_obj]
return artifact_list
def fake_vnf_package(updates=None):
vnf_package = {
'id': constants.UUID,
@ -187,8 +223,12 @@ def fake_vnf_package(updates=None):
return vnf_package
def return_vnfpkg_obj(vnf_package_updates=None, vnfd_updates=None,
deployment_flavour_updates=None, software_image_updates=None):
def return_vnfpkg_obj(
vnf_package_updates=None,
vnfd_updates=None,
deployment_flavour_updates=None,
software_image_updates=None,
vnf_artifact_updates=None):
vnf_package = fake_vnf_package(vnf_package_updates)
obj = vnf_package_obj.VnfPackage(**vnf_package)
obj.vnfd = _return_vnfd(vnfd_updates)
@ -198,6 +238,7 @@ def return_vnfpkg_obj(vnf_package_updates=None, vnfd_updates=None,
flavour_list = vnf_deployment_flavour_obj.VnfDeploymentFlavoursList()
flavour_list.objects = [deployment_flavour]
obj.vnf_deployment_flavours = flavour_list
obj.vnf_artifacts = _return_artifact_list(vnf_artifact_updates)
return obj
@ -235,7 +276,8 @@ def return_vnfd_data(csar_without_tosca_meta=False):
if csar_without_tosca_meta else 'vnfpkgm1')
unique_name = str(uuid.uuid4())
csar_temp_dir = os.path.join('/tmp', unique_name)
utils.copy_csar_files(csar_temp_dir, csar_dir, csar_without_tosca_meta,
utils.copy_artifact_files(
csar_temp_dir, csar_dir, csar_without_tosca_meta,
read_vnfd_only=True)
if csar_without_tosca_meta:
file_names = ['vnfd_helloworld_single.yaml']

View File

@ -15,6 +15,7 @@
from unittest import mock
import ddt
import os
from oslo_serialization import jsonutils
from six.moves import http_client
from six.moves import urllib
@ -101,8 +102,12 @@ class TestController(base.TestCase):
req = fake_request.HTTPRequest.blank(path)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -124,8 +129,12 @@ class TestController(base.TestCase):
query)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -133,6 +142,7 @@ class TestController(base.TestCase):
{'exclude_fields': 'softwareImages'},
{'exclude_fields': 'checksum'},
{'exclude_fields': 'userDefinedData'},
{'exclude_fields': 'additionalArtifacts'}
)
def test_index_attribute_selector_exclude_fields(self, params,
mock_vnf_list):
@ -150,6 +160,7 @@ class TestController(base.TestCase):
{'fields': 'softwareImages'},
{'fields': 'checksum'},
{'fields': 'userDefinedData'},
{'fields': 'additionalArtifacts'}
)
def test_index_attribute_selector_fields(self, params, mock_vnf_list):
"""Test valid attribute names with fields parameter
@ -157,7 +168,11 @@ class TestController(base.TestCase):
We can specify complex attributes in fields. Hence the data only
contains such attributes.
"""
complex_attrs = ['softwareImages', 'checksum', 'userDefinedData']
complex_attrs = [
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts']
query = urllib.parse.urlencode(params)
req = fake_request.HTTPRequest.blank('/vnfpkgm/v1/vnf_packages?' +
query)
@ -187,8 +202,11 @@ class TestController(base.TestCase):
'userDefinedData': {'key1': 'value1'},
'softwareImages': [{'userMetadata': {'key3': 'value3'}}]
}
expected_result = fakes.index_response(remove_attrs=[
'checksum'], vnf_package_updates=vnf_package_updates)
expected_result = fakes.index_response(
remove_attrs=[
'checksum',
'additionalArtifacts'],
vnf_package_updates=vnf_package_updates)
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -200,14 +218,15 @@ class TestController(base.TestCase):
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'checksum', 'softwareImages'])
'checksum', 'softwareImages', 'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
def test_index_attribute_selector_nested_complex_attribute(self,
mock_vnf_list):
params = {'fields': 'softwareImages/checksum/algorithm,'
'softwareImages/minRam'}
'softwareImages/minRam,additionalArtifacts/metadata,'
'additionalArtifacts/checksum/algorithm'}
query = urllib.parse.urlencode(params)
req = fake_request.HTTPRequest.blank('/vnfpkgm/v1/vnf_packages?' +
query)
@ -217,6 +236,10 @@ class TestController(base.TestCase):
'softwareImages': [{
'minRam': 0,
'checksum': {'algorithm': 'fake-algorithm'}
}],
'additionalArtifacts': [{
'metadata': {},
'checksum': {'algorithm': 'SHA-256'}
}]
}
expected_result = fakes.index_response(remove_attrs=[
@ -236,7 +259,18 @@ class TestController(base.TestCase):
{'filter': '(gte,softwareImages/createdAt,2020-03-14 04:10:15+00:00)'},
{'filter': '(lt,softwareImages/createdAt,2020-03-20 04:10:15+00:00)'},
{'filter': '(lte,softwareImages/createdAt,2020-03-11 04:10:15+00:00)'},
)
{'filter': '(eq,additionalArtifacts/checksum/algorithm,'
'SHA-256)'},
{'filter': '(neq,additionalArtifacts/checksum/algorithm,'
'SHA-256)'},
{'filter': '(in,additionalArtifacts/checksum/algorithm,'
'SHA-256)'},
{'filter': '(nin,additionalArtifacts/checksum/algorithm,'
'SHA-256)'},
{'filter': '(cont,additionalArtifacts/checksum/algorithm,'
'SHA-256)'},
{'filter': '(ncont,additionalArtifacts/checksum/algorithm,'
'SHA-256)'})
def test_index_filter_operator(self, filter_params, mock_vnf_list):
"""Tests all supported operators in filter expression """
query = urllib.parse.urlencode(filter_params)
@ -244,8 +278,12 @@ class TestController(base.TestCase):
query)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -257,8 +295,12 @@ class TestController(base.TestCase):
query)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -289,6 +331,9 @@ class TestController(base.TestCase):
{'filter': '(eq,softwareImages/containerFormat,dummy_value)'},
{'filter': '(eq,softwareImages/checksum/hash,dummy_value)'},
{'filter': '(eq,softwareImages/checksum/algorithm,dummy_value)'},
{'filter': '(eq,additionalArtifacts/artifactPath,dummy_value)'},
{'filter': '(eq,additionalArtifacts/checksum/algorithm,dummy_value)'},
{'filter': '(eq,additionalArtifacts/checksum/hash,dummy_value)'}
)
def test_index_filter_attributes(self, filter_params, mock_vnf_list):
"""Test various attributes supported for filter parameter """
@ -297,8 +342,12 @@ class TestController(base.TestCase):
query)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -325,8 +374,12 @@ class TestController(base.TestCase):
query)
mock_vnf_list.return_value = fakes.return_vnf_package_list()
res_dict = self.controller.index(req)
expected_result = fakes.index_response(remove_attrs=[
'softwareImages', 'checksum', 'userDefinedData'])
expected_result = fakes.index_response(
remove_attrs=[
'softwareImages',
'checksum',
'userDefinedData',
'additionalArtifacts'])
self.assertEqual(expected_result, res_dict)
@mock.patch.object(VnfPackagesList, "get_by_filters")
@ -936,3 +989,142 @@ class TestController(base.TestCase):
self.assertRaises(exc.HTTPBadRequest,
self.controller._get_range_from_request, request,
120)
def test_fetch_vnf_package_artifacts_with_invalid_uuid(
self):
# invalid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.INVALID_UUID, constants.ARTIFACT_PATH))
req.method = 'GET'
exception = self.assertRaises(exc.HTTPNotFound,
self.controller.fetch_vnf_package_artifacts,
req, constants.INVALID_UUID, constants.ARTIFACT_PATH)
self.assertEqual(
"Can not find requested vnf package: %s" % constants.INVALID_UUID,
exception.explanation)
@mock.patch.object(controller.VnfPkgmController, "_get_csar_path")
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_invalid_path(
self, mock_vnf_by_id, mock_get_csar_path):
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
base_path = os.path.dirname(os.path.abspath(__file__))
extract_path = os.path.join(base_path, '../../etc/samples/'
'sample_vnf_package_csar_in_meta_and_manifest')
mock_get_csar_path.return_value = extract_path
# valid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.INVALID_ARTIFACT_PATH))
req.method = 'GET'
self.assertRaises(exc.HTTPNotFound,
self.controller.fetch_vnf_package_artifacts,
req, constants.UUID,
constants.INVALID_ARTIFACT_PATH)
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_invalid_range(
self, mock_vnf_by_id):
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
# valid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.headers['Range'] = 'bytes=150-'
req.method = 'GET'
self.assertRaises(exc.HTTPRequestRangeNotSatisfiable,
self.controller._get_range_from_request, req,
33)
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_invalid_multiple_range(
self, mock_vnf_by_id):
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
# valid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.headers['Range'] = 'bytes=10-20,21-30'
req.method = 'GET'
self.assertRaises(exc.HTTPBadRequest,
self.controller._get_range_from_request, req,
33)
@mock.patch.object(controller.VnfPkgmController, "_get_csar_path")
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_range(
self, mock_vnf_by_id, mock_get_csar_path):
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
base_path = os.path.dirname(os.path.abspath(__file__))
extract_path = os.path.join(base_path, '../../etc/samples/'
'sample_vnf_package_csar_in_meta_and_manifest')
mock_get_csar_path.return_value = extract_path
# valid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.headers['Range'] = 'bytes=10-30'
req.method = 'GET'
absolute_artifact_path = \
os.path.join(extract_path, constants.ARTIFACT_PATH)
with open(absolute_artifact_path, 'rb') as f:
f.seek(10, 1)
data = f.read(20)
artifact_data = \
self.controller._download_vnf_artifact(
absolute_artifact_path, 10, 20)
self.assertEqual(data, artifact_data)
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_non_existing_vnf_package(
self, mock_vnf_by_id):
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.method = 'GET'
mock_vnf_by_id.side_effect = tacker_exc.VnfPackageNotFound
self.assertRaises(exc.HTTPNotFound,
self.controller.fetch_vnf_package_artifacts, req,
constants.UUID, constants.ARTIFACT_PATH)
@mock.patch.object(controller.VnfPkgmController, "_get_csar_path")
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_non_range(
self, mock_vnf_by_id, mock_get_csar_path):
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
base_path = os.path.dirname(os.path.abspath(__file__))
extract_path = os.path.join(base_path, '../../etc/samples/'
'sample_vnf_package_csar_in_meta_and_manifest')
mock_get_csar_path.return_value = extract_path
# valid_uuid
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.method = 'GET'
absolute_artifact_path = \
os.path.join(extract_path, constants.ARTIFACT_PATH)
with open(absolute_artifact_path, 'rb') as f:
data = f.read()
artifact_data = \
self.controller._download_vnf_artifact(
absolute_artifact_path, 0, 34)
self.assertEqual(data, artifact_data)
@mock.patch.object(vnf_package.VnfPackage, "get_by_id")
def test_fetch_vnf_package_artifacts_with_invalid_status(
self, mock_vnf_by_id):
vnf_package_updates = {
'onboarding_state': 'CREATED',
'operational_state': 'DISABLED'
}
mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj(
vnf_package_updates=vnf_package_updates)
req = fake_request.HTTPRequest.blank(
'/vnf_packages/%s/artifacts/%s'
% (constants.UUID, constants.ARTIFACT_PATH))
req.method = 'GET'
self.assertRaises(exc.HTTPConflict,
self.controller.fetch_vnf_package_artifacts,
req, constants.UUID,
constants.ARTIFACT_PATH)
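
For readers following the range tests above: the controller helper _download_vnf_artifact(path, offset, size) is exercised purely through its observable behaviour here (seek to the offset, return size bytes). A minimal sketch of that behaviour, inferred from these assertions and not taken from the controller source:

def download_artifact_range(artifact_path, offset, chunk_size):
    """Return chunk_size bytes of the artifact file starting at offset."""
    # Sketch only; the real controller helper may stream in chunks and
    # handle an absent Range header (chunk_size of None) differently.
    with open(artifact_path, 'rb') as f:
        f.seek(offset)
        return f.read(chunk_size)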

View File

@ -15,6 +15,7 @@
import base64
import http.server
import os
import re
import shutil
import threading
@ -79,6 +80,25 @@ def create_csar_with_unique_vnfd_id(csar_dir):
common_dir = os.path.join(csar_dir, "../common/")
zcsar = zipfile.ZipFile(tempname, 'w')
artifact_files = []
for (dpath, _, fnames) in os.walk(csar_dir):
if not fnames:
continue
for fname in fnames:
if fname == 'TOSCA.meta' or fname.endswith('.mf'):
src_file = os.path.join(dpath, fname)
with open(src_file, 'rb') as f:
artifacts_data = f.read()
artifacts_data_split = re.split(b'\n\n+', artifacts_data)
for data in artifacts_data_split:
if re.findall(b'.?Algorithm:.?|.?Hash:.?', data):
artifact_data_dict = yaml.safe_load(data)
artifact_files.append(
artifact_data_dict['Source']
if 'Source' in artifact_data_dict.keys()
else artifact_data_dict['Name'])
artifact_files = list(set(artifact_files))
for (dpath, _, fnames) in os.walk(csar_dir):
if not fnames:
continue
@ -86,11 +106,15 @@ def create_csar_with_unique_vnfd_id(csar_dir):
src_file = os.path.join(dpath, fname)
dst_file = os.path.relpath(os.path.join(dpath, fname), csar_dir)
if fname.endswith('.yaml') or fname.endswith('.yml'):
if dst_file not in artifact_files:
with open(src_file, 'rb') as yfile:
data = yaml.safe_load(yfile)
_update_unique_id_in_yaml(data, unique_id)
zcsar.writestr(dst_file, yaml.dump(
data, default_flow_style=False, allow_unicode=True))
data, default_flow_style=False,
allow_unicode=True))
else:
zcsar.write(src_file, dst_file)
else:
zcsar.write(src_file, dst_file)
@ -106,6 +130,79 @@ def create_csar_with_unique_vnfd_id(csar_dir):
return tempname, unique_id


def create_csar_with_unique_artifact(csar_dir):
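"""Create a temporary CSAR zip that keeps declared artifacts intact.

Artifact entries (blocks carrying both Algorithm and Hash) are read from
TOSCA.meta and the manifest. Definition files from the shared
'common_artifact' directory get a fresh unique id injected unless they
are declared as artifacts, while the package files and the 'common'
files are added unchanged. Returns the path of the generated zip.
"""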
unique_id = uuidutils.generate_uuid()
tempfd, tempname = tempfile.mkstemp(suffix=".zip",
dir=os.path.dirname(csar_dir))
os.close(tempfd)
common_artifact_dir = os.path.join(csar_dir, '../common_artifact')
common_dir = os.path.join(csar_dir, '../common')
zcsar = zipfile.ZipFile(tempname, 'w')
artifact_files = []
for (dpath, _, fnames) in os.walk(csar_dir):
if not fnames:
continue
for fname in fnames:
if fname == 'TOSCA.meta' or fname.endswith('.mf'):
src_file = os.path.join(dpath, fname)
with open(src_file, 'rb') as f:
artifacts_data = f.read()
artifacts_data_split = re.split(b'\n\n+', artifacts_data)
for data in artifacts_data_split:
if re.findall(b".?Algorithm:.?", data) and\
re.findall(b".?Hash:.?", data):
artifact_data_dict = yaml.safe_load(data)
artifact_files.append(
artifact_data_dict['Source']
if 'Source' in artifact_data_dict.keys()
else artifact_data_dict['Name'])
artifact_files = list(set(artifact_files))
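# Add the shared definition files; YAML templates that are not declared
# as artifacts get the unique VNFD id injected, declared artifacts are
# written as-is.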
for (dpath, _, fnames) in os.walk(common_artifact_dir):
if not fnames:
continue
for fname in fnames:
src_file = os.path.join(dpath, fname)
dst_file = os.path.relpath(
os.path.join(dpath, fname), common_artifact_dir)
if fname.endswith('.yaml') or fname.endswith('.yml'):
if dst_file not in artifact_files:
with open(src_file, 'rb') as yfile:
data = yaml.safe_load(yfile)
_update_unique_id_in_yaml(data, unique_id)
zcsar.writestr(dst_file, yaml.dump(
data, default_flow_style=False,
allow_unicode=True))
else:
zcsar.write(src_file, dst_file)
else:
zcsar.write(src_file, dst_file)
for (dpath, _, fnames) in os.walk(csar_dir):
if not fnames:
continue
for fname in fnames:
src_file = os.path.join(dpath, fname)
dst_file = os.path.relpath(os.path.join(dpath, fname), csar_dir)
zcsar.write(src_file, dst_file)
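# Finally add the shared 'common' files, skipping the kubernetes and
# Scripts subtrees when packaging a vnf_instance sample.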
for (dpath, _, fnames) in os.walk(common_dir):
if not fnames:
continue
if ('vnf_instance' in csar_dir and 'kubernetes' in dpath) or \
('vnf_instance' in csar_dir and 'Scripts' in dpath):
continue
for fname in fnames:
src_file = os.path.join(dpath, fname)
dst_file = os.path.relpath(os.path.join(dpath, fname), common_dir)
zcsar.write(src_file, dst_file)
zcsar.close()
return tempname


def copy_csar_files(fake_csar_path, csar_dir_name,
csar_without_tosca_meta=False, read_vnfd_only=False):
"""Copy csar directory to temporary directory
@ -146,6 +243,43 @@ def copy_csar_files(fake_csar_path, csar_dir_name,
"Definitions"))
def copy_artifact_files(fake_csar_path, csar_dir_name,
csar_without_tosca_meta=False, read_vnfd_only=False):
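"""Copy a sample CSAR with artifacts into a temporary directory.

The sample package named by csar_dir_name is copied to fake_csar_path,
together with the shared Files/ and Scripts/ directories unless
read_vnfd_only is set. Unless csar_without_tosca_meta is set, the common
Definitions files from 'common_artifact' and 'common' are copied in as
well, so tests can load a complete package from disk.
"""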
sample_vnf_package = os.path.join(
"./tacker/tests/etc/samples/etsi/nfv", csar_dir_name)
shutil.copytree(sample_vnf_package, fake_csar_path)
common_files_path = os.path.join(
"./tacker/tests/etc/samples/etsi/nfv/")
if not read_vnfd_only:
# Copying image file.
shutil.copytree(os.path.join(common_files_path, "Files/"),
os.path.join(fake_csar_path, "Files/"))
shutil.copytree(os.path.join(common_files_path, "Scripts/"),
os.path.join(fake_csar_path, "Scripts/"))
if csar_without_tosca_meta:
return
# Copying common vnfd files.
tosca_definition_file_paths = [
os.path.join(common_files_path, "common_artifact/Definitions/"),
os.path.join(common_files_path, "common/Definitions/")
]
for tosca_definition_file_path in tosca_definition_file_paths:
for (dpath, _, fnames) in os.walk(tosca_definition_file_path):
if not fnames:
continue
for fname in fnames:
src_file = os.path.join(dpath, fname)
if not os.path.exists(os.path.join(
fake_csar_path, "Definitions")):
os.mkdir(os.path.join(fake_csar_path, "Definitions"))
os.mknod(os.path.join(fake_csar_path, "Definitions", fname))
shutil.copyfile(src_file, os.path.join(
fake_csar_path, "Definitions", fname))
class AuthHandler(http.server.SimpleHTTPRequestHandler):
'''Main class to present webpages and authentication.'''