Implement artifacts support in Tacker

Implements: blueprint add-artifacts-vnf-packages

* Add artifacts support for VNF packages:
  * Read and verify artifacts from the TOSCA.meta and manifest files.
  * Modify and add artifact-related APIs (an example request is sketched below).
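
As a quick illustration of the new API, the sketch below fetches an artifact from an
onboarded package. The endpoint prefix, host/port and auth header are assumptions
(typical Tacker defaults); the package UUID and artifact path reuse the values added
to tacker/tests/constants.py in this change.

    # Illustrative client call, not part of this change.
    import requests

    TACKER = "http://127.0.0.1:9890/vnfpkgm/v1"          # assumed endpoint prefix
    PKG_ID = "f26f181d-7891-4720-b022-b074ec1733ef"      # sample UUID from the tests
    HEADERS = {"X-Auth-Token": "<keystone-token>"}       # auth details omitted

    # Full download: 200 OK, Content-Type guessed from the file name.
    resp = requests.get(
        "%s/vnf_packages/%s/artifacts/Scripts/install.sh" % (TACKER, PKG_ID),
        headers=HEADERS)
    print(resp.status_code, resp.headers.get("Content-Type"))

    # Partial download: a Range header yields 206 plus a Content-Range header.
    resp = requests.get(
        "%s/vnf_packages/%s/artifacts/Scripts/install.sh" % (TACKER, PKG_ID),
        headers=dict(HEADERS, Range="bytes=0-9"))
    print(resp.status_code, resp.headers.get("Content-Range"))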

Change-Id: Iad045407338535022aa385b57125225ee6253732
changes/97/739697/19
LiangLu 2 years ago
parent 959da510b5
commit c0647bde69
  1. 139
      tacker/api/vnfpkgm/v1/controller.py
  2. 7
      tacker/api/vnfpkgm/v1/router.py
  3. 153
      tacker/common/csar_utils.py
  4. 8
      tacker/common/exceptions.py
  5. 39
      tacker/conductor/conductor_server.py
  6. 20
      tacker/db/db_sqlalchemy/models.py
  7. 2
      tacker/db/migration/alembic_migrations/versions/HEAD
  8. 55
      tacker/db/migration/alembic_migrations/versions/e06fbdc90a32_add_db_tables_for_add_artifacts.py
  9. 1
      tacker/objects/__init__.py
  10. 208
      tacker/objects/vnf_artifact.py
  11. 56
      tacker/objects/vnf_package.py
  12. 10
      tacker/policies/vnf_package.py
  13. 3
      tacker/tests/constants.py
  14. 54
      tacker/tests/etc/samples/etsi/nfv/common/Definitions/etsi_nfv_sol001_common_types.yaml
  15. 263
      tacker/tests/etc/samples/etsi/nfv/common/Definitions/etsi_nfv_sol001_vnfd_types.yaml
  16. 48
      tacker/tests/etc/samples/etsi/nfv/common/Files/kubernetes/deployment.yaml
  17. 3
      tacker/tests/etc/samples/etsi/nfv/common/Scripts/install.sh
  18. 0
      tacker/tests/etc/samples/etsi/nfv/common_artifact/Definitions/helloworld3_df_simple.yaml
  19. 0
      tacker/tests/etc/samples/etsi/nfv/common_artifact/Definitions/helloworld3_top.vnfd.yaml
  20. 0
      tacker/tests/etc/samples/etsi/nfv/common_artifact/Definitions/helloworld3_types.yaml
  21. 10
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest/TOSCA-Metadata/TOSCA.meta
  22. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest/manifest.mf
  23. 5
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false/TOSCA-Metadata/TOSCA.meta
  24. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false/manifest.mf
  25. 9
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false_algorithm/TOSCA-Metadata/TOSCA.meta
  26. 9
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false_hash/TOSCA-Metadata/TOSCA.meta
  27. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false_hash/manifest.mf
  28. 9
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_false_source/TOSCA-Metadata/TOSCA.meta
  29. 8
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_missing_key/TOSCA-Metadata/TOSCA.meta
  30. 9
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_meta_and_manifest_missing_value/TOSCA-Metadata/TOSCA.meta
  31. 1266
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_single_manifest_false_name/VNF.yaml
  32. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_in_single_manifest_false_name/VNF1.mf
  33. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_manifest/VNF.mf
  34. 1266
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_manifest/VNF.yaml
  35. 9
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_meta/TOSCA-Metadata/TOSCA.meta
  36. 5
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_meta_in_manifest/TOSCA-Metadata/TOSCA.meta
  37. 11
      tacker/tests/etc/samples/etsi/nfv/sample_vnf_package_csar_meta_in_manifest/manifest.mf
  38. 275
      tacker/tests/etc/samples/etsi/nfv/sample_vnfpkg_tosca_vnfd/Definitions/helloworld3_df_simple.yaml
  39. 31
      tacker/tests/etc/samples/etsi/nfv/sample_vnfpkg_tosca_vnfd/Definitions/helloworld3_top.vnfd.yaml
  40. 53
      tacker/tests/etc/samples/etsi/nfv/sample_vnfpkg_tosca_vnfd/Definitions/helloworld3_types.yaml
  41. 7
      tacker/tests/etc/samples/etsi/nfv/sample_vnfpkg_tosca_vnfd/TOSCA-Metadata/TOSCA.meta
  42. 17
      tacker/tests/etc/samples/etsi/nfv/vnfpkgm1/TOSCA-Metadata/TOSCA.meta
  43. 3
      tacker/tests/etc/samples/etsi/nfv/vnfpkgm2/TOSCA-Metadata/TOSCA.meta
  44. 11
      tacker/tests/etc/samples/etsi/nfv/vnfpkgm2/manifest.mf
  45. 11
      tacker/tests/etc/samples/etsi/nfv/vnfpkgm3/vnfd_helloworld_single.mf
  46. 1266
      tacker/tests/etc/samples/etsi/nfv/vnfpkgm3/vnfd_helloworld_single.yaml
  47. 202
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/etsi_nfv_sol001_common_types.yaml
  48. 1465
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/etsi_nfv_sol001_vnfd_types.yaml
  49. 308
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/sample_vnfd_df_complex_int.yaml
  50. 308
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/sample_vnfd_df_simple_int.yaml
  51. 32
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/sample_vnfd_top.yaml
  52. 65
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Definitions/sample_vnfd_types.yaml
  53. 1
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Files/images/cirros-0.4.0-x86_64-disk.img
  54. 1
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Files/images/cirros.img
  55. 48
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Files/kubernetes/deployment.yaml
  56. 3
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/Scripts/install.sh
  57. 10
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/TOSCA-Metadata/TOSCA.meta
  58. 11
      tacker/tests/etc/samples/sample_vnf_package_csar_in_meta_and_manifest/manifest.mf
  59. 4
      tacker/tests/etc/samples/vnfd_lcm_user_data.yaml
  60. 2
      tacker/tests/functional/base.py
  61. 106
      tacker/tests/functional/vnfpkgm/test_vnf_package.py
  62. 198
      tacker/tests/unit/common/test_csar_utils.py
  63. 5
      tacker/tests/unit/conductor/fakes.py
  64. 6
      tacker/tests/unit/conductor/test_conductor_server.py
  65. 84
      tacker/tests/unit/db/test_vnf_artifact.py
  66. 15
      tacker/tests/unit/db/test_vnf_package.py
  67. 19
      tacker/tests/unit/objects/fakes.py
  68. 3
      tacker/tests/unit/objects/test_vnf_package.py
  69. 50
      tacker/tests/unit/vnfpkgm/fakes.py
  70. 228
      tacker/tests/unit/vnfpkgm/test_controller.py
  71. 144
      tacker/tests/utils.py

@ -13,9 +13,15 @@
# License for the specific language governing permissions and limitations
# under the License.
from io import BytesIO
import mimetypes
import os
from glance_store import exceptions as store_exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from oslo_utils import uuidutils
import six
@ -28,6 +34,7 @@ from tacker._i18n import _
from tacker.api.schemas import vnf_packages
from tacker.api import validation
from tacker.api.views import vnf_packages as vnf_packages_view
from tacker.common import csar_utils
from tacker.common import exceptions
from tacker.common import utils
from tacker.conductor.conductorrpc import vnf_pkgm_rpc
@ -99,8 +106,8 @@ class VnfPkgmController(wsgi.Controller):
try:
vnf_package = vnf_package_obj.VnfPackage.get_by_id(
request.context, id,
expected_attrs=["vnf_deployment_flavours", "vnfd"])
request.context, id, expected_attrs=[
"vnf_deployment_flavours", "vnfd", "vnf_artifacts"])
except exceptions.VnfPackageNotFound:
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
@ -475,6 +482,134 @@ class VnfPkgmController(wsgi.Controller):
request.response.headers['Content-Type'] = 'application/zip'
return self._create_vnfd_zip(vnfd_files_and_data)
@wsgi.response(http_client.OK)
@wsgi.expected_errors((http_client.BAD_REQUEST, http_client.FORBIDDEN,
http_client.NOT_FOUND, http_client.CONFLICT,
http_client.REQUESTED_RANGE_NOT_SATISFIABLE))
def fetch_vnf_package_artifacts(self, request, id, artifact_path):
context = request.environ['tacker.context']
# get policy
context.can(vnf_package_policies.VNFPKGM % 'fetch_artifact')
# get vnf_package
if not uuidutils.is_uuid_like(id):
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
try:
vnf_package = vnf_package_obj.VnfPackage.get_by_id(
request.context, id,
expected_attrs=["vnf_artifacts"])
except exceptions.VnfPackageNotFound:
msg = _("Can not find requested vnf package: %s") % id
raise webob.exc.HTTPNotFound(explanation=msg)
if vnf_package.onboarding_state != \
fields.PackageOnboardingStateType.ONBOARDED:
msg = _("VNF Package %(id)s state is not "
"%(onboarded)s")
raise webob.exc.HTTPConflict(explanation=msg % {"id": id,
"onboarded": fields.PackageOnboardingStateType.ONBOARDED})
offset, chunk_size = 0, None
# get all artifact paths
artifact_file_paths = []
for item in vnf_package.vnf_artifacts:
artifact_file_paths.append(item.artifact_path)
if artifact_path in artifact_file_paths:
# get file's size
csar_path = self._get_csar_path(vnf_package)
absolute_artifact_path = os.path.join(csar_path, artifact_path)
if not os.path.isfile(absolute_artifact_path):
msg = _(
"This type of path(url) '%s' is currently not supported") \
% artifact_path
raise webob.exc.HTTPBadRequest(explanation=msg)
artifact_size = os.path.getsize(absolute_artifact_path)
range_val = self._get_range_from_request(request, artifact_size)
# range_val exists
if range_val:
if isinstance(range_val, webob.byterange.Range):
# get the position of the last byte in the artifact file
response_end = artifact_size - 1
if range_val.start >= 0:
offset = range_val.start
else:
if abs(range_val.start) < artifact_size:
offset = artifact_size + range_val.start
if range_val.end is not None and \
range_val.end < artifact_size:
chunk_size = range_val.end - offset
response_end = range_val.end - 1
else:
chunk_size = artifact_size - offset
request.response.status_int = 206
# range_val does not exist, download the whole content of file
else:
offset = 0
chunk_size = artifact_size
# get the file's MIME type
mime_type = mimetypes.guess_type(artifact_path.split('/')[-1])[0]
if mime_type:
request.response.headers['Content-Type'] = mime_type
else:
request.response.headers['Content-Type'] = \
'application/octet-stream'
try:
artifact_data = self._download_vnf_artifact(
absolute_artifact_path, offset, chunk_size)
except exceptions.FailedToGetVnfArtifact as e:
LOG.error(e.msg)
raise webob.exc.HTTPInternalServerError(
explanation=e.msg)
request.response.text = artifact_data.decode('utf-8')
if request.response.status_int == 206:
request.response.headers['Content-Range'] = 'bytes %s-%s/%s' \
% (offset,
response_end,
artifact_size)
else:
chunk_size = artifact_size
request.response.headers['Content-Length'] = chunk_size
return request.response
else:
msg = _("Not Found Artifact File.")
raise webob.exc.HTTPNotFound(explanation=msg)
def _get_csar_path(self, vnf_package):
csar_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
vnf_package.id)
if not os.path.isdir(csar_path):
location = vnf_package.location_glance_store
try:
zip_path = glance_store.load_csar(vnf_package.id, location)
csar_utils.extract_csar_zip_file(zip_path, csar_path)
except (store_exceptions.GlanceStoreException) as e:
exc_msg = encodeutils.exception_to_unicode(e)
msg = (_("Exception raised from glance store can be "
"unrecoverable if it is not related to connection"
" error. Error: %s.") % exc_msg)
raise exceptions.FailedToGetVnfArtifact(error=msg)
return csar_path
def _download_vnf_artifact(self, artifact_file_path, offset=0,
chunk_size=None):
try:
with open(artifact_file_path, 'rb') as f:
f.seek(offset, 1)
vnf_artifact_data = f.read(chunk_size)
return vnf_artifact_data
except Exception as e:
exc_msg = encodeutils.exception_to_unicode(e)
msg = (_("Exception raised while reading artifact file"
" Error: %s.") % exc_msg)
raise exceptions.FailedToGetVnfArtifact(error=msg)
def _create_vnfd_zip(self, vnfd_files_and_data):
buff = BytesIO()
with ZipFile(buff, 'w', zipfile.ZIP_DEFLATED) as zip_archive:

@ -81,3 +81,10 @@ class VnfpkgmAPIRouter(wsgi.Router):
self._setup_route(mapper,
"/vnf_packages/{id}/vnfd",
methods, controller, default_resource)
# Allowed methods on /vnf_packages/{id}/artifacts/{artifact_path}
methods = {"GET": "fetch_vnf_package_artifacts"}
self._setup_route(mapper,
"/vnf_packages/{id}/artifacts/"
"{artifact_path:.*?/*.*?}",
methods, controller, default_resource)
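
Note: the inline regex in {artifact_path:.*?/*.*?} lets the captured segment contain
slashes, so nested paths such as Scripts/install.sh resolve to the new controller
action. A minimal sketch with the Routes library (not Tacker code) shows the effect;
the route string simply mirrors the one registered above.

    import routes

    mapper = routes.Mapper()
    mapper.connect("/vnf_packages/{id}/artifacts/{artifact_path:.*?/*.*?}",
                   action="fetch_vnf_package_artifacts")

    match = mapper.match("/vnf_packages/abc123/artifacts/Scripts/install.sh")
    print(match["id"])             # abc123
    print(match["artifact_path"])  # Scripts/install.sh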

@ -12,22 +12,38 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
import os
import re
import shutil
import yaml
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from six.moves.urllib.parse import urlparse
from toscaparser.prereq.csar import CSAR
from toscaparser.tosca_template import ToscaTemplate
import zipfile
from tacker.common import exceptions
import tacker.conf
import urllib.request as urllib2
HASH_DICT = {
'sha-224': hashlib.sha224,
'sha-256': hashlib.sha256,
'sha-384': hashlib.sha384,
'sha-512': hashlib.sha512
}
CONF = tacker.conf.CONF
LOG = logging.getLogger(__name__)
TOSCA_META = 'TOSCA-Metadata/TOSCA.meta'
ARTIFACT_KEYS = ['Source', 'Algorithm', 'Hash']
IMAGE_FORMAT_LIST = ['raw', 'vhd', 'vhdx', 'vmdk', 'vdi', 'iso', 'ploop',
'qcow2', 'aki', 'ari', 'ami', 'img']
def _check_type(custom_def, node_type, type_list):
@ -272,7 +288,140 @@ def _get_data_from_csar(tosca, context, id):
error_msg = "No VNF flavours are available"
raise exceptions.InvalidCSAR(error_msg)
return vnf_data, flavours
csar = CSAR(tosca.input_path, tosca.a_file)
vnf_artifacts = []
if csar.validate():
vnf_artifacts = _get_vnf_artifacts(csar)
return vnf_data, flavours, vnf_artifacts
def _get_vnf_artifacts(csar):
vnf_artifacts = []
if csar.is_tosca_metadata:
if csar._get_metadata("ETSI-Entry-Manifest"):
manifest_path = csar._get_metadata("ETSI-Entry-Manifest")
if manifest_path.lower().endswith(".mf"):
manifest_data = csar.zfile.read(manifest_path)
vnf_artifacts = _convert_artifacts(
vnf_artifacts, manifest_data, csar)
else:
invalid_manifest_err_msg = (
('The file "%(manifest)s" in the CSAR "%(csar)s" does not '
'contain valid manifest.') %
{'manifest': manifest_path, 'csar': csar.path})
raise exceptions.InvalidCSAR(invalid_manifest_err_msg)
tosca_data = csar.zfile.read(TOSCA_META)
vnf_artifacts = _convert_artifacts(vnf_artifacts, tosca_data, csar)
else:
filelist = csar.zfile.namelist()
main_template_file_name = os.path.splitext(
csar.main_template_file_name)[0]
for path in filelist:
if path.lower().endswith(".mf"):
manifest_file_name = os.path.splitext(path)[0]
if manifest_file_name == main_template_file_name:
manifest_data = csar.zfile.read(path)
vnf_artifacts = _convert_artifacts(
vnf_artifacts, manifest_data, csar)
else:
invalid_manifest_err_msg = \
(('The filename "%(manifest)s" is an invalid name. '
'The name must be the same as the main template '
'file name.') %
{'manifest': path})
raise exceptions.InvalidCSAR(invalid_manifest_err_msg)
# Deduplication
vnf_artifacts = [dict(t) for t in set([tuple(d.items())
for d in vnf_artifacts])]
return vnf_artifacts
def _convert_artifacts(vnf_artifacts, artifacts_data, csar):
artifacts_data_split = re.split(b'\n\n+', artifacts_data)
for data in artifacts_data_split:
if re.findall(b'.?Name:.?|.?Source:.?|', data):
# validate key's existence
if re.findall(b'.?Algorithm:.?|.?Hash:.?', data):
artifact_data_dict = yaml.safe_load(data)
if 'Name' in artifact_data_dict.keys():
artifact_data_dict.update(
{"Source": artifact_data_dict.pop("Name")})
if 'Content-Type' in artifact_data_dict.keys():
del artifact_data_dict['Content-Type']
if sorted(ARTIFACT_KEYS) != sorted(artifact_data_dict.keys()):
missing_key = list(set(ARTIFACT_KEYS) ^
set(artifact_data_dict.keys()))
missing_key = sorted(missing_key)
invalid_artifact_err_msg = \
(('One of the artifact information '
'may not have the key("%(key)s")') %
{'key': missing_key})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
# validate value's existence
for key, value in artifact_data_dict.items():
if not value:
invalid_artifact_err_msg = \
(('One of the artifact information may not have '
'the key value("%(key)s")') % {'key': key})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
artifact_path = artifact_data_dict.get('Source')
if os.path.splitext(artifact_path)[-1][1:] \
in IMAGE_FORMAT_LIST:
continue
else:
algorithm = artifact_data_dict.get('Algorithm')
hash_code = artifact_data_dict.get('Hash')
result = _validate_hash(algorithm, hash_code,
csar, artifact_path)
if result:
vnf_artifacts.append(artifact_data_dict)
else:
invalid_artifact_err_msg = \
(('The hash "%(hash)s" of artifact file '
'"%(artifact)s" is an invalid value.') %
{'hash': hash_code, 'artifact': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
return vnf_artifacts
def _validate_hash(algorithm, hash_code, csar, artifact_path):
z = zipfile.ZipFile(csar.path)
algorithm = algorithm.lower()
# validate Algorithm's value
if algorithm in HASH_DICT.keys():
hash_obj = HASH_DICT[algorithm]()
else:
invalid_artifact_err_msg = (('The algorithm("%(algorithm)s") of '
'artifact("%(artifact_path)s") is '
'an invalid value.') %
{'algorithm': algorithm,
'artifact_path': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
filelist = csar.zfile.namelist()
# validate Source's value
if artifact_path in filelist:
hash_obj.update(z.read(artifact_path))
elif ((urlparse(artifact_path).scheme == 'file') or
(bool(urlparse(artifact_path).scheme) and
bool(urlparse(artifact_path).netloc))):
hash_obj.update(urllib2.urlopen(artifact_path).read())
else:
invalid_artifact_err_msg = (('The path("%(artifact_path)s") of '
'artifact Source is an invalid value.') %
{'artifact_path': artifact_path})
raise exceptions.InvalidCSAR(invalid_artifact_err_msg)
# validate Hash's value
if hash_code == hash_obj.hexdigest():
return True
else:
return False
def extract_csar_zip_file(file_path, extract_path):
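
For reference, the validation above boils down to re-hashing the artifact bytes from
the CSAR with the algorithm named in the manifest and comparing digests. A standalone
sketch, assuming a CSAR zip on disk and one entry already parsed from the manifest
(file names and the digest are placeholders):

    import hashlib
    import zipfile

    HASH_DICT = {'sha-224': hashlib.sha224, 'sha-256': hashlib.sha256,
                 'sha-384': hashlib.sha384, 'sha-512': hashlib.sha512}

    def validate_artifact_hash(csar_path, entry):
        # entry example: {'Source': 'Scripts/install.sh',
        #                 'Algorithm': 'SHA-256', 'Hash': '<hex digest>'}
        algorithm = entry['Algorithm'].lower()
        if algorithm not in HASH_DICT:
            raise ValueError("unsupported algorithm: %s" % entry['Algorithm'])
        hash_obj = HASH_DICT[algorithm]()
        with zipfile.ZipFile(csar_path) as z:
            hash_obj.update(z.read(entry['Source']))
        return hash_obj.hexdigest() == entry['Hash']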

@ -248,6 +248,10 @@ class VnfSoftwareImageNotFound(NotFound):
message = _("No vnf software image with id %(id)s.")
class VnfArtifactNotFound(NotFound):
message = _("No vnf artifact with id %(id)s.")
class VnfInstantiatedInfoNotFound(NotFound):
message = _("No vnf instantiated info for vnf id %(vnf_instance_id)s.")
@ -303,6 +307,10 @@ class FailedToGetVnfdData(Invalid):
message = _("Failed to get csar zip file from glance store: %(error)s")
class FailedToGetVnfArtifact(Invalid):
message = _("Failed to get artifact file from glance store: %(error)s")
class FailedToGetVnfPackageDetails(Invalid):
message = _("Failed to get vnf package details: %(error)s")

@ -233,7 +233,26 @@ class Conductor(manager.Manager):
self._create_software_images(
context, sw_image, deploy_flavour.id)
def _onboard_vnf_package(self, context, vnf_package, vnf_data, flavours):
def _create_vnf_artifacts(self, context, package_uuid, artifact):
vnf_artifact = objects.VnfPackageArtifactInfo(context=context)
vnf_artifact.package_uuid = package_uuid
vnf_artifact.artifact_path = artifact['Source']
vnf_artifact.algorithm = artifact['Algorithm']
vnf_artifact.hash = artifact['Hash']
vnf_artifact._metadata = {}
vnf_artifact.create()
def _onboard_vnf_package(
self,
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts):
if vnf_artifacts:
for artifact in vnf_artifacts:
self._create_vnf_artifacts(context, vnf_package.id, artifact)
package_vnfd = objects.VnfPackageVnfd(context=context)
package_vnfd.package_uuid = vnf_package.id
@ -251,9 +270,14 @@ class Conductor(manager.Manager):
def upload_vnf_package_content(self, context, vnf_package):
location = vnf_package.location_glance_store
zip_path = glance_store.load_csar(vnf_package.id, location)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
context.elevated(), vnf_package.id, zip_path)
self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)
self._onboard_vnf_package(
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts)
vnf_package.onboarding_state = (
fields.PackageOnboardingStateType.ONBOARDED)
vnf_package.operational_state = (
@ -282,10 +306,15 @@ class Conductor(manager.Manager):
vnf_package.save()
zip_path = glance_store.load_csar(vnf_package.id, location)
vnf_data, flavours = csar_utils.load_csar_data(
vnf_data, flavours, vnf_artifacts = csar_utils.load_csar_data(
context.elevated(), vnf_package.id, zip_path)
self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)
self._onboard_vnf_package(
context,
vnf_package,
vnf_data,
flavours,
vnf_artifacts)
vnf_package.onboarding_state = (
fields.PackageOnboardingStateType.ONBOARDED)
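
For context, the third element now returned by load_csar_data() is the list of parsed
manifest entries. An illustration of its shape and of the columns each entry is
persisted into by _create_vnf_artifacts() (the digest is a placeholder):

    vnf_artifacts = [
        {'Source': 'Scripts/install.sh',   # -> vnf_artifacts.artifact_path
         'Algorithm': 'SHA-256',           # -> vnf_artifacts.algorithm
         'Hash': '<hex digest>'},          # -> vnf_artifacts.hash
    ]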

@ -129,6 +129,20 @@ class VnfPackageVnfd(model_base.BASE, VnfPackageVnfdSoftDeleteMixin,
vnfd_version = sa.Column(sa.String(255), nullable=False)
class VnfPackageArtifactInfo(model_base.BASE, models.SoftDeleteMixin,
models.TimestampMixin, models_v1.HasId):
"""Contains all info about vnf artifacts."""
__tablename__ = 'vnf_artifacts'
package_uuid = sa.Column(sa.String(36),
sa.ForeignKey('vnf_packages.id'),
nullable=False)
artifact_path = sa.Column(sa.Text(), nullable=False)
algorithm = sa.Column(sa.String(64), nullable=False)
hash = sa.Column(sa.String(128), nullable=False)
_metadata = sa.Column(sa.JSON(), nullable=True)
class VnfPackage(model_base.BASE, models.SoftDeleteMixin,
models.TimestampMixin, models_v1.HasTenant,
models_v1.HasId):
@ -160,6 +174,12 @@ class VnfPackage(model_base.BASE, models.SoftDeleteMixin,
'VnfPackageVnfd.package_uuid,'
'VnfPackageVnfd.deleted == 0)')
vnf_artifacts = orm.relationship(
VnfPackageArtifactInfo,
primaryjoin='and_(VnfPackage.id == '
'VnfPackageArtifactInfo.package_uuid,'
'VnfPackageArtifactInfo.deleted == 0)')
@property
def metadetails(self):
return {m.key: m.value for m in self._metadata}

@ -0,0 +1,55 @@
# Copyright (C) 2020 FUJITSU DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""add db tables for add artifacts
Revision ID: e06fbdc90a32
Revises: d2e39e01d540
Create Date: 2020-09-17 02:52:41.435112
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'e06fbdc90a32'
down_revision = 'd2e39e01d540'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import Boolean
from tacker.db import types
def upgrade(active_plugins=None, options=None):
op.create_table(
'vnf_artifacts',
sa.Column('id', types.Uuid(length=36), nullable=False),
sa.Column('package_uuid', types.Uuid(length=36), nullable=False),
sa.Column('artifact_path', sa.Text(), nullable=False),
sa.Column('algorithm', sa.String(64), nullable=False),
sa.Column('hash', sa.String(128), nullable=False),
sa.Column('_metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('deleted_at', sa.DateTime(), nullable=True),
sa.Column('deleted', Boolean, default=False),
sa.PrimaryKeyConstraint('id'),
sa.ForeignKeyConstraint(['package_uuid'],
['vnf_packages.id'], ),
mysql_engine='InnoDB'
)

@ -35,3 +35,4 @@ def register_all():
__import__('tacker.objects.instantiate_vnf_req')
__import__('tacker.objects.vnf_resources')
__import__('tacker.objects.terminate_vnf_req')
__import__('tacker.objects.vnf_artifact')

@ -0,0 +1,208 @@
# Copyright 2020 NTT DATA.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_versionedobjects import base as ovoo_base
from tacker._i18n import _
from tacker.common import exceptions
from tacker.common import utils
from tacker.db import api as db_api
from tacker.db.db_sqlalchemy import api
from tacker.db.db_sqlalchemy import models
from tacker.objects import base
from tacker.objects import fields
LOG = logging.getLogger(__name__)
@db_api.context_manager.writer
def _vnf_artifacts_create(context, values):
vnf_artifacts = models.VnfPackageArtifactInfo()
vnf_artifacts.update(values)
vnf_artifacts.save(context.session)
return vnf_artifacts
@db_api.context_manager.reader
def _vnf_artifact_get_by_id(context, id):
query = api.model_query(context, models.VnfPackageArtifactInfo,
read_deleted="no").filter_by(id=id)
result = query.first()
if not result:
raise exceptions.VnfArtifactNotFound(id=id)
return result
@base.TackerObjectRegistry.register
class VnfPackageArtifactInfo(base.TackerObject, base.TackerPersistentObject):
ALL_ATTRIBUTES = {
"additionalArtifacts": {
'artifactPath': ('artifact_path', 'string',
'VnfPackageArtifactInfo'),
'metadata': ('_metadata', 'dict', 'VnfPackageArtifactInfo'),
"checksum": {
'hash': ('hash', 'string', 'VnfPackageArtifactInfo'),
'algorithm': ('algorithm', 'string', 'VnfPackageArtifactInfo')
}
}
}
FLATTEN_ATTRIBUTES = utils.flatten_dict(ALL_ATTRIBUTES.copy())
SIMPLE_ATTRIBUTES = ['artifactPath']
COMPLEX_ATTRIBUTES = [
'additionalArtifacts',
'additionalArtifacts/metadata',
'additionalArtifacts/checksum']
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.UUIDField(nullable=False),
'package_uuid': fields.UUIDField(nullable=False),
'artifact_path': fields.StringField(nullable=False),
'algorithm': fields.StringField(nullable=False),
'hash': fields.StringField(nullable=False),
'_metadata': fields.DictOfStringsField(nullable=True, default={})
}
@base.remotable_classmethod
def get_by_id(cls, context, id):
db_artifact = _vnf_artifact_get_by_id(context, id)
return cls._from_db_object(context, cls(), db_artifact)
@staticmethod
def _from_db_object(context, vnf_artifacts, db_vnf_artifacts):
for key in vnf_artifacts.fields:
setattr(vnf_artifacts, key, db_vnf_artifacts[key])
vnf_artifacts._context = context
vnf_artifacts.obj_reset_changes()
return vnf_artifacts
def obj_load_attr(self, attrname):
if not self._context:
raise exceptions.OrphanedObjectError(
method='obj_load_attr', objtype=self.obj_name())
if 'id' not in self:
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('attribute %s not lazy-loadable') % attrname)
LOG.debug("Lazy-loading '%(attr)s' on %(name)s id %(id)s",
{'attr': attrname,
'name': self.obj_name(),
'id': self.id,
})
self._obj_load_attr(attrname)
def _obj_load_attr(self, attrname):
if attrname in self.fields and attrname != 'id':
self._load_generic(attrname)
else:
# NOTE(nirajsingh): Raise error if non existing field is
# requested.
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('attribute %s not lazy-loadable') % attrname)
self.obj_reset_changes([attrname])
def _load_generic(self, attrname):
artifact = self.__class__.get_by_id(self._context,
id=self.id)
if attrname not in artifact:
raise exceptions.ObjectActionError(
action='obj_load_attr',
reason=_('loading %s requires recursion') % attrname)
for field in self.fields:
if field in artifact and field not in self:
setattr(self, field, getattr(artifact, field))
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exceptions.ObjectActionError(action='create',
reason=_('already created'))
updates = self.obj_get_changes()
db_vnf_artifacts = _vnf_artifacts_create(
self._context, updates)
self._from_db_object(self._context, self, db_vnf_artifacts)
def to_dict(self, include_fields=None):
response = dict()
fields = ['additionalArtifacts/%s' % attribute for attribute in
self.SIMPLE_ATTRIBUTES]
to_fields = set(fields).intersection(include_fields)
for field in to_fields:
display_field = field.split("/")[-1]
response[display_field] = getattr(
self, self.FLATTEN_ATTRIBUTES[field][0])
to_fields = set([key for key in self.FLATTEN_ATTRIBUTES.keys()
if key.startswith('additionalArtifacts/checksum')])
checksum = dict()
to_fields = to_fields.intersection(include_fields)
for field in to_fields:
display_field = field.split("/")[-1]
checksum[display_field] = getattr(
self, self.FLATTEN_ATTRIBUTES[field][0])
if checksum:
response.update({"checksum": checksum})
metadata = dict()
to_fields = set(['additionalArtifacts/metadata']).\
intersection(include_fields)
if to_fields:
metadata_json = \
getattr(self, self.
FLATTEN_ATTRIBUTES['additionalArtifacts/metadata'][0])
if metadata_json is not None:
metadata.update(metadata_json)
response.update({"metadata": metadata})
return response
@base.TackerObjectRegistry.register
class VnfPackageArtifactInfoList(ovoo_base.ObjectListBase, base.TackerObject):
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('VnfPackageArtifactInfo')
}
def to_dict(self, include_fields=None):
artifactList = list()
for artifact in self.objects:
arti_dict = artifact.to_dict(include_fields)
if arti_dict:
artifactList.append(arti_dict)
return artifactList
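
An illustration of what to_dict() above produces per artifact when all
additionalArtifacts attributes are requested, i.e. the entries that end up under
"additionalArtifacts" in the VNF package show response (values are placeholders):

    [
        {
            'artifactPath': 'Scripts/install.sh',
            'checksum': {'algorithm': 'SHA-256', 'hash': '<hex digest>'},
            'metadata': {},
        }
    ]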

@ -33,11 +33,15 @@ from tacker.db.db_sqlalchemy import models
from tacker import objects
from tacker.objects import base
from tacker.objects import fields
from tacker.objects import vnf_artifact
from tacker.objects import vnf_software_image
_NO_DATA_SENTINEL = object()
VNF_PACKAGE_OPTIONAL_ATTRS = ['vnf_deployment_flavours', 'vnfd']
VNF_PACKAGE_OPTIONAL_ATTRS = [
'vnf_deployment_flavours',
'vnfd',
'vnf_artifacts']
LOG = logging.getLogger(__name__)
@ -178,6 +182,9 @@ def _vnf_package_list_by_filters(context, read_deleted=None, filters=None):
query = query.join(models.VnfDeploymentFlavour).join(
models.VnfSoftwareImage)
if 'VnfPackageArtifactInfo' in filter_data:
query = query.join(models.VnfPackageArtifactInfo)
query = apply_filters(query, filters)
return query.all()
@ -233,6 +240,9 @@ def _destroy_vnf_package(context, package_uuid):
api.model_query(context, models.VnfDeploymentFlavour). \
filter_by(package_uuid=package_uuid). \
update(updated_values, synchronize_session=False)
api.model_query(context, models.VnfPackageArtifactInfo). \
filter_by(package_uuid=package_uuid). \
update(updated_values, synchronize_session=False)
api.model_query(context, models.VnfPackageVnfd). \
filter_by(package_uuid=package_uuid). \
soft_delete(synchronize_session=False)
@ -294,6 +304,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
}
ALL_ATTRIBUTES.update(vnf_software_image.VnfSoftwareImage.ALL_ATTRIBUTES)
ALL_ATTRIBUTES.update(vnf_artifact.VnfPackageArtifactInfo.ALL_ATTRIBUTES)
FLATTEN_ATTRIBUTES = utils.flatten_dict(ALL_ATTRIBUTES.copy())
@ -305,6 +316,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
COMPLEX_ATTRIBUTES = ["checksum", "userDefinedData"]
COMPLEX_ATTRIBUTES.extend(
vnf_software_image.VnfSoftwareImage.COMPLEX_ATTRIBUTES)
COMPLEX_ATTRIBUTES.extend(vnf_artifact.VnfPackageArtifactInfo.
COMPLEX_ATTRIBUTES)
# Version 1.1: Added 'size' to persist size of VnfPackage.
VERSION = '1.1'
@ -325,6 +338,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
'VnfDeploymentFlavoursList', nullable=True),
'vnfd': fields.ObjectField('VnfPackageVnfd', nullable=True),
'size': fields.IntegerField(nullable=False, default=0),
'vnf_artifacts': fields.ObjectField('VnfPackageArtifactInfoList',
nullable=True)
}
def __init__(self, context=None, **kwargs):
@ -375,6 +390,10 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
if 'vnfd' in expected_attrs:
vnf_package._load_vnfd(db_vnf_package.get('vnfd'))
if 'vnf_artifacts' in expected_attrs:
vnf_package._load_vnf_artifacts(
db_vnf_package.get('vnf_artifacts'))
def _load_vnf_deployment_flavours(self, db_flavours=_NO_DATA_SENTINEL):
if db_flavours is _NO_DATA_SENTINEL:
vnf_package = self.get_by_id(
@ -412,6 +431,25 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self._context, db_vnfd)
self.obj_reset_changes(['vnfd'])
def _load_vnf_artifacts(self, db_artifact=_NO_DATA_SENTINEL):
if db_artifact is _NO_DATA_SENTINEL:
vnf_package = self.get_by_id(
self._context, self.id,
expected_attrs=['vnf_artifacts'])
if 'vnf_artifacts' in vnf_package:
self.vnf_artifacts = vnf_package.vnf_artifacts
self.vnf_artifacts.obj_reset_changes(recursive=True)
self.obj_reset_changes(['vnf_artifacts'])
else:
self.vnf_artifacts = objects.\
VnfPackageArtifactInfoList(objects=[])
elif db_artifact:
self.vnf_artifacts = base.obj_make_list(
self._context, objects.VnfPackageArtifactInfoList(
self._context), objects.VnfPackageArtifactInfo,
db_artifact)
self.obj_reset_changes(['vnf_artifacts'])
def _load_generic(self, attrname):
vnf_package = self.__class__.get_by_id(self._context,
id=self.id,
@ -449,6 +487,8 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self._load_vnf_deployment_flavours()
elif attrname == 'vnfd':
self._load_vnfd()
elif attrname == 'vnf_artifacts':
self._load_vnf_artifacts()
elif attrname in self.fields and attrname != 'id':
self._load_generic(attrname)
else:
@ -472,7 +512,11 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
self.id = updates['id']
for key in ['vnf_deployment_flavours']:
if key in updates:
if key in updates.keys():
updates.pop(key)
for key in ['vnf_artifacts']:
if key in updates.keys():
updates.pop(key)
user_data = updates.pop('user_data', None)
@ -499,7 +543,7 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
def save(self):
updates = self.tacker_obj_get_changes()
for key in ['vnf_deployment_flavours']:
if key in updates:
if key in updates.keys():
updates.pop(key)
db_vnf_package = _vnf_package_update(self._context,
@ -607,6 +651,12 @@ class VnfPackage(base.TackerObject, base.TackerPersistentObject,
if checksum:
vnf_package_response.update(checksum)
artifacts = self.vnf_artifacts.to_dict(
include_fields=include_fields)
if artifacts:
vnf_package_response.update(
{'additionalArtifacts': artifacts})
return vnf_package_response

@ -115,6 +115,16 @@ rules = [
'path': '/vnf_packages/{vnf_package_id}/vnfd'
}
]),
policy.DocumentedRuleDefault(
name=VNFPKGM % 'fetch_artifact',
check_str=base.RULE_ADMIN_OR_OWNER,
description="reads the content of the artifact within a VNF package.",
operations=[
{
'method': 'GET',
'path': '/vnf_packages/{vnfPkgId}/artifacts/{artifactPath}'
}
]),
]

@ -39,3 +39,6 @@ LEASE_CHECK_EVENT_TIMEOUT = 300
LEASE_CHECK_SLEEP_TIME = 3
UUID = 'f26f181d-7891-4720-b022-b074ec1733ef'
INVALID_UUID = 'f181d-7891-4720-b022-b074ec3ef'
# artifact related
ARTIFACT_PATH = 'Scripts/install.sh'
INVALID_ARTIFACT_PATH = 'Fake_Scripts/fake_install.sh'

@ -1,6 +1,4 @@
# TODO:Manually change from version 1.2 to 1.0
tosca_definitions_version: tosca_simple_yaml_1_2
#tosca_definitions_version: tosca_simple_yaml_1_2
description: ETSI NFV SOL 001 common types definitions version 2.6.1
metadata:
template_name: etsi_nfv_sol001_common_types
@ -10,16 +8,16 @@ metadata:
data_types:
tosca.datatypes.nfv.L2AddressData:
derived_from: tosca.datatypes.Root
description: Describes the information on the MAC addresses to be assigned to a connection point.
description: Describes the information on the MAC addresses to be assigned to a connection point.
properties:
mac_address_assignment:
type: boolean
description: Specifies if the address assignment is the responsibility of management and orchestration function or not. If it is set to True, it is the management and orchestration function responsibility
required: true
required: true
tosca.datatypes.nfv.L3AddressData:
derived_from: tosca.datatypes.Root
description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
description: Provides information about Layer 3 level addressing scheme and parameters applicable to a CP
properties:
ip_address_assignment:
type: boolean
@ -27,7 +25,7 @@ data_types:
required: true
floating_ip_activated:
type: boolean
description: Specifies if the floating IP scheme is activated on the Connection Point or not
description: Specifies if the floating IP scheme is activated on the Connection Point or not
required: true
ip_address_type:
type: string
@ -37,14 +35,14 @@ data_types:
- valid_values: [ ipv4, ipv6 ]
number_of_ip_address:
type: integer
description: Minimum number of IP addresses to be assigned
description: Minimum number of IP addresses to be assigned
required: false
constraints:
- greater_than: 0
tosca.datatypes.nfv.AddressData:
derived_from: tosca.datatypes.Root
description: Describes information about the addressing scheme and parameters applicable to a CP
description: Describes information about the addressing scheme and parameters applicable to a CP
properties:
address_type:
type: string
@ -58,12 +56,12 @@ data_types:
required: false
l3_address_data:
type: tosca.datatypes.nfv.L3AddressData
description: Provides the information on the IP addresses to be assigned to a connection point
description: Provides the information on the IP addresses to be assigned to a connection point
required: false
tosca.datatypes.nfv.ConnectivityType:
derived_from: tosca.datatypes.Root
description: describes additional connectivity information of a virtualLink
description: describes additional connectivity information of a virtualLink
properties:
layer_protocols:
type: list
@ -82,7 +80,7 @@ data_types:
tosca.datatypes.nfv.LinkBitrateRequirements:
derived_from: tosca.datatypes.Root
description: describes the requirements in terms of bitrate for a virtual link
description: describes the requirements in terms of bitrate for a virtual link
properties:
root:
type: integer # in bits per second
@ -104,13 +102,13 @@ data_types:
associated_layer_protocol:
type: string
required: true
description: One of the values of the property layer_protocols of the CP
description: One of the values of the property layer_protocols of the CP
constraints:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
address_data:
type: list
description: Provides information on the addresses to be assigned to the CP
entry_schema:
entry_schema:
type: tosca.datatypes.nfv.AddressData
required: false
@ -137,23 +135,23 @@ data_types:
tosca.datatypes.nfv.Qos:
derived_from: tosca.datatypes.Root
description: describes QoS data for a given VL used in a VNF deployment flavour
description: describes QoS data for a given VL used in a VNF deployment flavour
properties:
latency:
type: scalar-unit.time #Number
description: Specifies the maximum latency
description: Specifies the maximum latency
required: true
constraints:
constraints:
- greater_than: 0 s
packet_delay_variation:
type: scalar-unit.time #Number
description: Specifies the maximum jitter
description: Specifies the maximum jitter
required: true
constraints:
constraints:
- greater_or_equal: 0 s
packet_loss_ratio:
type: float
description: Specifies the maximum packet loss ratio
description: Specifies the maximum packet loss ratio
required: false
constraints:
- in_range: [ 0.0, 1.0 ]
@ -162,21 +160,21 @@ capability_types:
tosca.capabilities.nfv.VirtualLinkable:
derived_from: tosca.capabilities.Node
description: A node type that includes the VirtualLinkable capability indicates that it can be pointed by tosca.relationships.nfv.VirtualLinksTo relationship type
relationship_types:
tosca.relationships.nfv.VirtualLinksTo:
derived_from: tosca.relationships.DependsOn
description: Represents an association relationship between the VduCp and VnfVirtualLink node types
description: Represents an association relationship between the VduCp and VnfVirtualLink node types
valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
node_types:
tosca.nodes.nfv.Cp:
derived_from: tosca.nodes.Root
description: Provides information regarding the purpose of the connection point
description: Provides information regarding the purpose of the connection point
properties:
layer_protocols:
type: list
description: Identifies which protocol the connection point uses for connectivity purposes
description: Identifies which protocol the connection point uses for connectivity purposes
required: true
entry_schema:
type: string
@ -184,17 +182,17 @@ node_types:
- valid_values: [ ethernet, mpls, odu2, ipv4, ipv6, pseudo-wire ]
role: #Name in ETSI NFV IFA011 v0.7.3: cpRole
type: string
description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
description: Identifies the role of the port in the context of the traffic flow patterns in the VNF or parent NS
required: false
constraints:
- valid_values: [ root, leaf ]
description:
type: string
description: Provides human-readable information on the purpose of the connection point
description: Provides human-readable information on the purpose of the connection point
required: false
protocol:
type: list
description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
description: Provides information on the addresses to be assigned to the connection point(s) instantiated from this Connection Point Descriptor
required: false
entry_schema:
type: tosca.datatypes.nfv.CpProtocolData

@ -1,24 +1,21 @@
# TODO:Manually change from version 1.2 to 1.0
tosca_definitions_version: tosca_simple_yaml_1_2
#tosca_definitions_version: tosca_simple_yaml_1_2
description: ETSI NFV SOL 001 vnfd types definitions version 2.6.1
metadata:
template_name: etsi_nfv_sol001_vnfd_types
template_author: ETSI_NFV
template_version: 2.6.1
# TODO:Manually change from version 1.2 to 1.0
#imports:
# - https://forge.etsi.org/rep/nfv/sol001/raw/v2.6.1/etsi_nfv_sol001_common_types.yaml
imports:
- ./etsi_nfv_sol001_common_types.yaml
data_types:
tosca.datatypes.nfv.VirtualNetworkInterfaceRequirements:
derived_from: tosca.datatypes.Root
description: Describes requirements on a virtual network interface
description: Describes requirements on a virtual network interface
properties:
name:
type: string
description: Provides a human readable name for the requirement.
description: Provides a human readable name for the requirement.
required: false
description:
type: string
@ -33,7 +30,7 @@ data_types:
description: The network interface requirements. A map of strings that contain a set of key-value pairs that describes the hardware platform specific network interface deployment requirements.
required: true
entry_schema:
type: string
type: string
nic_io_requirements:
type: tosca.datatypes.nfv.LogicalNodeData
description: references (couples) the CP with any logical node I/O requirements (for network devices) that may have been created. Linking these attributes is necessary so that so that I/O requirements that need to be articulated at the logical node level can be associated with the network interface requirements associated with the CP.
@ -71,7 +68,7 @@ data_types:
description: supports the specification of requirements related to virtual memory of a virtual compute resource
properties:
virtual_mem_size:
type: scalar-unit.size
type: scalar-unit.size
description: Amount of virtual memory.
required: true
virtual_mem_oversubscription_policy:
@ -83,7 +80,7 @@ data_types:
description: The hardware platform specific VDU memory requirements. A map of strings that contains a set of key-value pairs that describes hardware platform specific VDU memory requirements.
required: false
entry_schema:
type: string
type: string
numa_enabled:
type: boolean
description: It specifies the memory allocation to be cognisant of the relevant process/core allocation.
@ -117,7 +114,7 @@ data_types:
description: The hardware platform specific VDU CPU requirements. A map of strings that contains a set of key-value pairs describing VDU CPU specific hardware platform requirements.
required: false
entry_schema:
type: string
type: string
virtual_cpu_pinning:
type: tosca.datatypes.nfv.VirtualCpuPinning
description: The virtual CPU pinning configuration for the virtualised compute resource.
@ -142,12 +139,12 @@ data_types:
tosca.datatypes.nfv.VnfcConfigurableProperties:
derived_from: tosca.datatypes.Root
description: Defines the configurable properties of a VNFC
description: Defines the configurable properties of a VNFC
# properties:
# additional_vnfc_configurable_properties:
# type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties
# type: tosca.datatypes.nfv.VnfcAdditionalConfigurableProperties
# description: Describes additional configuration for VNFC that
# can be modified using the ModifyVnfInfo operation
# can be modified using the ModifyVnfInfo operation
# required: false
# derived types are expected to introduce
# additional_vnfc_configurable_properties with its type derived from
@ -163,13 +160,13 @@ data_types:
properties:
min_number_of_instances:
type: integer
description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
description: Minimum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
required: true
constraints:
- greater_or_equal: 0
max_number_of_instances:
type: integer
description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
description: Maximum number of instances of the VNFC based on this Vdu.Compute that is permitted to exist for a particular VNF deployment flavour.
required: true
constraints:
- greater_or_equal: 0
@ -240,7 +237,7 @@ data_types:
description: Specifies the maximum transmission unit (MTU) value for this L2 protocol.
required: false
constraints:
- greater_than: 0
- greater_than: 0
tosca.datatypes.nfv.L3ProtocolData:
derived_from: tosca.datatypes.Root
@ -259,7 +256,7 @@ data_types:
cidr: