Add vnf packages RESTful APIs
Implemented the following APIs:

* GET /vnf_packages
* POST /vnf_packages/
* GET /vnf_packages/{vnfPkgId}
* DELETE /vnf_packages/{vnfPkgId}
* PUT /vnf_packages/{vnfPkgId}/package_content
* POST /vnf_packages/{vnfPkgId}/package_content/upload_from_uri

Partial-Implements: blueprint tosca-csar-mgmt-driver
Depends-On: If8155399df12a96cb86631dfa22eaca7a5a8d398
Co-Author: Neha Alhat <neha.alhat@nttdata.com>
Change-Id: Id3b4812e24a1ed84fe94429e074f96ae11530517
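For reviewers trying the change out, a minimal sketch of exercising the new endpoints with python-requests; the API host/port, token handling, and sample CSAR path are assumptions of the example, not part of this change:

import requests

# Assumed deployment details; adjust to your environment.
BASE = 'http://127.0.0.1:9890/vnfpkgm/v1'
HEADERS = {'X-Auth-Token': '<keystone-token>'}

# Create an empty package resource (onboarding state: CREATED).
pkg = requests.post(BASE + '/vnf_packages',
                    json={'userDefinedData': {'owner': 'demo'}},
                    headers=HEADERS).json()

# Upload the CSAR zip (state: CREATED -> UPLOADING -> PROCESSING).
with open('sample_vnf_pkg.zip', 'rb') as csar:
    requests.put(BASE + '/vnf_packages/%s/package_content' % pkg['id'],
                 data=csar, headers=HEADERS)

# List and fetch packages.
print(requests.get(BASE + '/vnf_packages', headers=HEADERS).json())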
commit 4fa204b370 (parent 8cdf3e543b)
@@ -1,6 +1,7 @@
[DEFAULT]
output_file = etc/tacker/tacker.conf.sample
wrap_width = 79
namespace = glance.store
namespace = tacker.common.config
namespace = tacker.conf
namespace = tacker.wsgi
@@ -33,6 +33,7 @@ fixtures==3.0.0
flake8==2.5.5
future==0.16.0
futurist==1.6.0
glance-store==0.26.1
google-auth==1.4.1
greenlet==0.4.13
hacking==0.12.0
@@ -151,7 +152,7 @@ tenacity==4.9.0
testresources==2.0.1
testscenarios==0.5.0
testtools==2.2.0
-tosca-parser==0.8.1
+tosca-parser==1.6.0
traceback2==1.4.0
unittest2==1.1.0
urllib3==1.22
@@ -39,7 +39,7 @@ oslo.versionedobjects>=1.33.3 # Apache-2.0
openstackdocstheme>=1.20.0 # Apache-2.0
python-neutronclient>=6.7.0 # Apache-2.0
python-novaclient>=9.1.0 # Apache-2.0
-tosca-parser>=0.8.1 # Apache-2.0
+tosca-parser>=1.6.0 # Apache-2.0
heat-translator>=1.3.1 # Apache-2.0
cryptography>=2.1 # BSD/Apache-2.0
paramiko>=2.0.0 # LGPLv2.1+
@@ -50,3 +50,6 @@ castellan>=0.16.0 # Apache-2.0
kubernetes>=5.0.0 # Apache-2.0
setuptools!=24.0.0,!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2,!=36.2.0,>=21.0.0 # PSF/ZPL
PyYAML>=3.12 # MIT

# Glance Store
glance-store>=0.26.1 # Apache-2.0
BIN samples/vnf_packages/sample_vnf_pkg.zip (new file, binary not shown)
tacker/api/views/vnf_packages.py (new file, 105 lines)
@@ -0,0 +1,105 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tacker.objects import fields


class ViewBuilder(object):

    def _get_links(self, vnf_package):
        return {
            "_links": {
                "self": {
                    "href": '/vnfpkgm/v1/vnf_packages/%s'
                            % vnf_package.id
                },
                "packageContent": {
                    "href": '/vnfpkgm/v1/vnf_packages/%s/package_content'
                            % vnf_package.id
                }
            }
        }

    def _get_software_images(self, vnf_deployment_flavours):
        software_images = list()
        for vnf_deployment_flavour in vnf_deployment_flavours:
            for sw_image in vnf_deployment_flavour.software_images:
                software_images.append({
                    "id": sw_image.software_image_id,
                    "name": sw_image.name,
                    "provider": "provider",
                    "version": sw_image.version,
                    "checksum": {
                        "algorithm": sw_image.algorithm,
                        "hash": sw_image.hash
                    },
                    "containerFormat": sw_image.container_format,
                    "diskFormat": sw_image.disk_format,
                    "minDisk": sw_image.min_disk,
                    "minRam": sw_image.min_ram,
                    "size": sw_image.size,
                    "imagePath": sw_image.image_path,
                    "userMetadata": sw_image.metadata
                })

        return {'softwareImages': software_images}

    def _get_vnfd(self, vnf_package):
        vnfd = vnf_package.vnfd
        return {
            'vnfdId': vnfd.vnfd_id,
            'vnfProvider': vnfd.vnf_provider,
            'vnfProductName': vnfd.vnf_product_name,
            'vnfSoftwareVersion': vnfd.vnf_software_version,
            'vnfdVersion': vnfd.vnfd_version
        }

    def _basic_vnf_package_info(self, vnf_package):
        return {
            'id': vnf_package.id,
            'onboardingState': vnf_package.onboarding_state,
            'operationalState': vnf_package.operational_state,
            'usageState': vnf_package.usage_state,
            'userDefinedData': vnf_package.user_data,
        }

    def _get_vnf_package(self, vnf_package):
        vnf_package_response = self._basic_vnf_package_info(vnf_package)

        links = self._get_links(vnf_package)
        vnf_package_response.update(links)

        if (vnf_package.onboarding_state ==
                fields.PackageOnboardingStateType.ONBOARDED):
            # add software images
            vnf_deployment_flavours = vnf_package.vnf_deployment_flavours
            vnf_package_response.update(self._get_software_images(
                vnf_deployment_flavours))

            vnf_package_response.update(self._get_vnfd(vnf_package))

        return vnf_package_response

    def create(self, request, vnf_package):
        return self._get_vnf_package(vnf_package)

    def show(self, request, vnf_package):
        return self._get_vnf_package(vnf_package)

    def index(self, request, vnf_packages):
        return {'vnf_packages': [self._get_vnf_package(
            vnf_package) for vnf_package in vnf_packages]}
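Under these view methods, a show() call on an onboarded package yields a body shaped like the following; all values below are invented for illustration and only the field names follow the builder above:

# Illustrative output of ViewBuilder.show() for an ONBOARDED package.
sample_response = {
    'id': 'f26f181d-7891-4720-b022-b074ec1733ef',
    'onboardingState': 'ONBOARDED',
    'operationalState': 'ENABLED',
    'usageState': 'NOT_IN_USE',
    'userDefinedData': {'owner': 'demo'},
    '_links': {
        'self': {'href': '/vnfpkgm/v1/vnf_packages/f26f181d-...'},
        'packageContent': {
            'href': '/vnfpkgm/v1/vnf_packages/f26f181d-.../package_content'},
    },
    'softwareImages': [{'id': 'VDU1', 'name': 'Software of VDU1',
                        'checksum': {'algorithm': 'sha-512',
                                     'hash': '6b813aa4...'}}],
    'vnfdId': 'b1bb0ce7-ebca-4fa7-95ed-4840d70a1177',
}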
@@ -13,30 +13,224 @@
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
from six.moves import http_client
from six.moves import urllib
import webob

from tacker._i18n import _
from tacker.api.schemas import vnf_packages
from tacker.api import validation
from tacker.api.views import vnf_packages as vnf_packages_view
from tacker.common import exceptions
from tacker.conductor.conductorrpc import vnf_pkgm_rpc
from tacker.glance_store import store as glance_store
from tacker.objects import fields
from tacker.objects import vnf_package as vnf_package_obj
from tacker.policies import vnf_package as vnf_package_policies
from tacker import wsgi

LOG = logging.getLogger(__name__)

CONF = cfg.CONF


class VnfPkgmController(wsgi.Controller):

    _view_builder_class = vnf_packages_view.ViewBuilder

    def __init__(self):
        super(VnfPkgmController, self).__init__()
        self.rpc_api = vnf_pkgm_rpc.VNFPackageRPCAPI()
        glance_store.initialize_glance_store()

    @wsgi.response(http_client.CREATED)
    @wsgi.expected_errors((http_client.BAD_REQUEST, http_client.FORBIDDEN))
    @validation.schema(vnf_packages.create)
    def create(self, request, body):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'create')

        vnf_package = vnf_package_obj.VnfPackage(context=request.context)
        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.CREATED)
        vnf_package.operational_state = (
            fields.PackageOperationalStateType.DISABLED)
        vnf_package.usage_state = fields.PackageUsageStateType.NOT_IN_USE
        vnf_package.user_data = body.get('userDefinedData', dict())
        vnf_package.tenant_id = request.context.project_id

        vnf_package.create()

        return self._view_builder.create(request, vnf_package)

    @wsgi.response(http_client.OK)
    @wsgi.expected_errors((http_client.FORBIDDEN, http_client.NOT_FOUND))
    def show(self, request, id):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'show')

        # check if id is of type uuid format
        if not uuidutils.is_uuid_like(id):
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        try:
            vnf_package = vnf_package_obj.VnfPackage.get_by_id(
                request.context, id,
                expected_attrs=["vnf_deployment_flavours", "vnfd"])
        except exceptions.VnfPackageNotFound:
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        return self._view_builder.show(request, vnf_package)

    @wsgi.response(http_client.OK)
    @wsgi.expected_errors((http_client.FORBIDDEN))
    def index(self, request):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'index')

        vnf_packages = vnf_package_obj.VnfPackagesList.get_all(
            request.context,
            expected_attrs=["vnf_deployment_flavours", "vnfd"])

        return self._view_builder.index(request, vnf_packages)

    @wsgi.response(http_client.NO_CONTENT)
    @wsgi.expected_errors((http_client.FORBIDDEN, http_client.NOT_FOUND))
    def delete(self, request, id):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'delete')

        # check if id is of type uuid format
        if not uuidutils.is_uuid_like(id):
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        try:
            vnf_package = vnf_package_obj.VnfPackage.get_by_id(
                request.context, id)
        except exceptions.VnfPackageNotFound:
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        if vnf_package.usage_state == \
                fields.PackageUsageStateType.IN_USE:
            msg = _("VNF Package %(id)s usage state is %(state)s")
            raise webob.exc.HTTPConflict(
                explanation=msg % {
                    "id": id,
                    "state": fields.PackageUsageStateType.IN_USE})

        # Delete vnf_package
        self.rpc_api.delete_vnf_package(context, vnf_package)

    @wsgi.response(http_client.ACCEPTED)
    @wsgi.expected_errors((http_client.FORBIDDEN, http_client.NOT_FOUND,
                           http_client.CONFLICT))
    def upload_vnf_package_content(self, request, id, body):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'upload_package_content')

        # check if id is of type uuid format
        if not uuidutils.is_uuid_like(id):
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        try:
            vnf_package = vnf_package_obj.VnfPackage.get_by_id(
                request.context, id)
        except exceptions.VnfPackageNotFound:
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        if vnf_package.onboarding_state != \
                fields.PackageOnboardingStateType.CREATED:
            msg = _("VNF Package %(id)s onboarding state "
                    "is not %(onboarding)s")
            raise webob.exc.HTTPConflict(explanation=msg % {"id": id,
                "onboarding": fields.PackageOnboardingStateType.CREATED})

        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.UPLOADING)

        vnf_package.save()

        try:
            (location, size, checksum, multihash,
             loc_meta) = glance_store.store_csar(context, id, body)
        except exceptions.UploadFailedToGlanceStore:
            with excutils.save_and_reraise_exception():
                vnf_package.onboarding_state = (
                    fields.PackageOnboardingStateType.CREATED)
                vnf_package.save()

        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.PROCESSING)

        vnf_package.algorithm = CONF.vnf_package.hashing_algorithm
        vnf_package.hash = multihash
        vnf_package.location_glance_store = location

        vnf_package.save()

        # process vnf_package
        self.rpc_api.upload_vnf_package_content(context, vnf_package)

    @wsgi.response(http_client.ACCEPTED)
    @wsgi.expected_errors((http_client.BAD_REQUEST, http_client.FORBIDDEN,
                           http_client.NOT_FOUND, http_client.CONFLICT))
    @validation.schema(vnf_packages.upload_from_uri)
    def upload_vnf_package_from_uri(self, request, id, body):
-        raise webob.exc.HTTPNotImplemented()
        context = request.environ['tacker.context']
        context.can(vnf_package_policies.VNFPKGM % 'upload_from_uri')

        # check if id is of type uuid format
        if not uuidutils.is_uuid_like(id):
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        url = body['addressInformation']
        try:
            data_iter = urllib.request.urlopen(url)
        except Exception:
            data_iter = None
            msg = _("Failed to open URL %s")
            raise webob.exc.HTTPBadRequest(explanation=msg % url)
        finally:
            if hasattr(data_iter, 'close'):
                data_iter.close()

        try:
            vnf_package = vnf_package_obj.VnfPackage.get_by_id(
                request.context, id)
        except exceptions.VnfPackageNotFound:
            msg = _("Can not find requested vnf package: %s") % id
            raise webob.exc.HTTPNotFound(explanation=msg)

        if vnf_package.onboarding_state != \
                fields.PackageOnboardingStateType.CREATED:
            msg = _("VNF Package %(id)s onboarding state is not "
                    "%(onboarding)s")
            raise webob.exc.HTTPConflict(explanation=msg % {"id": id,
                "onboarding": fields.PackageOnboardingStateType.CREATED})

        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.UPLOADING)

        vnf_package.save()

        # process vnf_package
        self.rpc_api.upload_vnf_package_from_uri(context, vnf_package,
                                                 body['addressInformation'],
                                                 user_name=body.get(
                                                     'userName'),
                                                 password=body.get(
                                                     'password'))


def create_resource():
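The controller methods above move a package through a small onboarding state machine before handing work to the conductor; a self-contained sketch of the happy-path progression (the state names mirror fields.PackageOnboardingStateType in this change):

# Happy-path onboarding progression enforced by the controller/conductor.
UPLOAD_FLOW = ['CREATED', 'UPLOADING', 'PROCESSING', 'ONBOARDED']

def next_state(current):
    """Return the next onboarding state, or None once ONBOARDED."""
    idx = UPLOAD_FLOW.index(current)
    return UPLOAD_FLOW[idx + 1] if idx + 1 < len(UPLOAD_FLOW) else None

assert next_state('CREATED') == 'UPLOADING'
assert next_state('ONBOARDED') is None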
tacker/common/csar_utils.py (new file, 310 lines)
@@ -0,0 +1,310 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import shutil
import zipfile

from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import excutils
from toscaparser.tosca_template import ToscaTemplate

from tacker._i18n import _
from tacker.common import exceptions
import tacker.conf


CONF = tacker.conf.CONF
LOG = logging.getLogger(__name__)


def _check_type(custom_def, node_type, type_list):
    for node_data_type, node_data_type_value in custom_def.items():
        if node_data_type == node_type and node_type in type_list:
            return True, node_data_type_value
        for k, v in node_data_type_value.items():
            if k == 'derived_from':
                if v in type_list and node_type == node_data_type:
                    return True, node_data_type_value
    return False, None


def _get_sw_image_artifact(artifacts):
    if not artifacts:
        return

    for artifact_value in artifacts.values():
        if 'type' in artifact_value:
            if artifact_value['type'] == 'tosca.artifacts.nfv.SwImage':
                return artifact_value


def _update_default_vnfd_data(node_value, node_type_value):
    vnf_properties = node_value['properties']
    type_properties = node_type_value['properties']
    for property_key, property_value in type_properties.items():
        if property_key == 'descriptor_id':
            # if descriptor_id is parameterized, then get the value from the
            # default property and set it in the vnf_properties.
            if vnf_properties and isinstance(
                    vnf_properties.get('descriptor_id'), dict):
                vnf_properties['descriptor_id'] = property_value.get("default")
    return vnf_properties


def _get_vnf_data(nodetemplates):
    type_list = ['tosca.nodes.nfv.VNF']
    for nt in nodetemplates:
        for node_name, node_value in nt.templates.items():
            type_status, node_type_value = _check_type(
                nt.custom_def, node_value['type'], type_list)
            if type_status and node_type_value:
                return _update_default_vnfd_data(node_value, node_type_value)


def _get_instantiation_levels(policies):
    if policies:
        for policy in policies:
            if policy.type_definition.type == \
                    'tosca.policies.nfv.InstantiationLevels':
                return policy.properties


def _update_flavour_data_from_vnf(custom_defs, node_tpl, flavour):
    type_list = ['tosca.nodes.nfv.VNF']

    type_status, _ = _check_type(custom_defs, node_tpl['type'], type_list)
    if type_status and node_tpl['properties']:
        vnf_properties = node_tpl['properties']
        if 'flavour_description' in vnf_properties:
            flavour.update(
                {'flavour_description': vnf_properties[
                    'flavour_description']})
        if 'flavour_id' in vnf_properties:
            flavour.update({'flavour_id': vnf_properties['flavour_id']})


def _get_software_image(custom_defs, nodetemplate_name, node_tpl):
    type_list = ['tosca.nodes.nfv.Vdu.Compute',
                 'tosca.nodes.nfv.Vdu.VirtualBlockStorage']
    type_status, _ = _check_type(custom_defs, node_tpl['type'], type_list)
    if type_status:
        properties = node_tpl['properties']
        sw_image_artifact = _get_sw_image_artifact(node_tpl.get('artifacts'))
        if sw_image_artifact:
            properties['sw_image_data'].update(
                {'software_image_id': nodetemplate_name})
            sw_image_data = properties['sw_image_data']
            if 'metadata' in sw_image_artifact:
                sw_image_data.update({'metadata':
                                      sw_image_artifact['metadata']})
            return sw_image_data


def _populate_flavour_data(tosca):
    flavours = []
    for tp in tosca.nested_tosca_templates_with_topology:
        sw_image_list = []

        # Setting up flavour data
        flavour_id = tp.substitution_mappings.properties.get('flavour_id')
        if flavour_id:
            flavour = {'flavour_id': flavour_id}
        else:
            flavour = {}
        instantiation_levels = _get_instantiation_levels(tp.policies)
        if instantiation_levels:
            flavour.update({'instantiation_levels': instantiation_levels})
        for template_name, node_tpl in tp.tpl.get('node_templates').items():
            # check the flavour property in vnf data
            _update_flavour_data_from_vnf(tp.custom_defs, node_tpl, flavour)

            # Update the software image data
            sw_image = _get_software_image(tp.custom_defs, template_name,
                                           node_tpl)
            if sw_image:
                sw_image_list.append(sw_image)

        # Add software images for flavour
        if sw_image_list:
            flavour.update({'sw_images': sw_image_list})

        if flavour:
            flavours.append(flavour)

    return flavours


def _get_instantiation_levels_from_policy(tpl_policies):
    """Get defined instantiation levels.

    Getting instantiation levels defined under policy type
    'tosca.policies.nfv.InstantiationLevels'.
    """

    levels = []
    for policy in tpl_policies:
        for key, value in policy.items():
            if value.get('type') == 'tosca.policies.nfv.InstantiationLevels'\
                    and value.get('properties', {}).get('levels', {}):
                levels = value.get('properties').get('levels').keys()
                default_level = value.get(
                    'properties').get('default_level')

                if default_level and default_level not in levels:
                    error_msg = "Level {} not found in defined levels" \
                                " {}".format(default_level,
                                             ",".join(sorted(levels)))
                    raise exceptions.InvalidCSAR(error_msg)
    return levels


def _validate_instantiation_levels(policy, instantiation_levels):
    expected_policy_type = ['tosca.policies.nfv.VduInstantiationLevels',
                            'tosca.policies.nfv.'
                            'VirtualLinkInstantiationLevels']
    for policy_name, policy_tpl in policy.items():
        if policy_tpl.get('type') not in expected_policy_type:
            return

        if not instantiation_levels:
            msg = ('Policy of type'
                   ' "tosca.policies.nfv.InstantiationLevels" is not'
                   ' defined.')
            raise exceptions.InvalidCSAR(msg)
        if policy_tpl.get('properties'):
            levels_in_policy = policy_tpl.get(
                'properties').get('levels')

            if levels_in_policy:
                invalid_levels = set(levels_in_policy.keys()) - set(
                    instantiation_levels)
            else:
                invalid_levels = set()

            if invalid_levels:
                error_msg = "Level(s) {} not found in defined levels" \
                            " {}".format(",".join(sorted(invalid_levels)),
                                         ",".join(sorted(
                                             instantiation_levels)))
                raise exceptions.InvalidCSAR(error_msg)


def _validate_sw_image_data_for_artifact(node_tpl, template_name):
    artifact_type = []
    artifacts = node_tpl.get('artifacts')
    if artifacts:
        for key, value in artifacts.items():
            if value.get('type') == 'tosca.artifacts.nfv.SwImage':
                artifact_type.append(value.get('type'))

        if len(artifact_type) > 1:
            error_msg = ('artifact of type "tosca.artifacts.nfv.SwImage"'
                         ' is added more than once for'
                         ' node %(node)s.') % {'node': template_name}
            raise exceptions.InvalidCSAR(error_msg)

        if artifact_type and node_tpl.get('properties'):
            if not node_tpl.get('properties').get('sw_image_data'):
                error_msg = ('Node property "sw_image_data" is missing for'
                             ' artifact type %(type)s for '
                             'node %(node)s.') % {
                    'type': artifact_type[0], 'node': template_name}
                raise exceptions.InvalidCSAR(error_msg)


def _validate_sw_image_data_for_artifacts(tosca):
    for tp in tosca.nested_tosca_templates_with_topology:
        for template_name, node_tpl in tp.tpl.get('node_templates').items():
            _validate_sw_image_data_for_artifact(node_tpl, template_name)

    for template in tosca.nodetemplates:
        _validate_sw_image_data_for_artifact(
            template.entity_tpl, template.name)


def _get_data_from_csar(tosca, context, id):
    for tp in tosca.nested_tosca_templates_with_topology:
        levels = _get_instantiation_levels_from_policy(tp.tpl.get("policies"))
        for policy_tpl in tp.tpl.get("policies"):
            _validate_instantiation_levels(policy_tpl, levels)

    _validate_sw_image_data_for_artifacts(tosca)
    vnf_data = _get_vnf_data(tosca.nodetemplates)
    if not vnf_data:
        error_msg = "VNF properties are mandatory"
        raise exceptions.InvalidCSAR(error_msg)

    flavours = _populate_flavour_data(tosca)
    if not flavours:
        error_msg = "No VNF flavours are available"
        raise exceptions.InvalidCSAR(error_msg)

    return vnf_data, flavours


def _extract_csar_zip_file(file_path, extract_path):
    try:
        with zipfile.ZipFile(file_path, 'r') as zf:
            zf.extractall(extract_path)
    except (RuntimeError, zipfile.BadZipfile) as exp:
        with excutils.save_and_reraise_exception():
            LOG.error("Error encountered while extracting "
                      "csar zip file %(path)s. Error: %(error)s.",
                      {'path': file_path,
                       'error': encodeutils.exception_to_unicode(exp)})
            exp.reraise = False
            raise exceptions.InvalidZipFile(path=file_path)


def load_csar_data(context, package_uuid, zip_path):

    extract_zip_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
                                    package_uuid)
    _extract_csar_zip_file(zip_path, extract_zip_path)

    try:
        tosca = ToscaTemplate(zip_path, None, True)
        return _get_data_from_csar(tosca, context, package_uuid)
    except exceptions.InvalidCSAR as exp:
        with excutils.save_and_reraise_exception():
            LOG.error("Error processing CSAR file %(path)s for vnf package"
                      " %(uuid)s: Error: %(error)s. ",
                      {'path': zip_path, 'uuid': package_uuid,
                       'error': encodeutils.exception_to_unicode(exp)})
    except Exception as exp:
        with excutils.save_and_reraise_exception():
            LOG.error("Tosca parser failed for vnf package %(uuid)s: "
                      "Error: %(error)s. ",
                      {'uuid': package_uuid,
                       'error': encodeutils.exception_to_unicode(exp)})
            exp.reraise = False
            raise exceptions.InvalidCSAR(
                encodeutils.exception_to_unicode(exp))


def delete_csar_data(package_uuid):
    # Remove zip and folder from the vnf_package_csar_path
    csar_zip_temp_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
                                      package_uuid)
    csar_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
                             package_uuid + ".zip")

    try:
        shutil.rmtree(csar_zip_temp_path)
        os.remove(csar_path)
    except OSError as exc:
        exc_message = encodeutils.exception_to_unicode(exc)
        msg = _('Failed to delete csar folder: '
                '%(csar_path)s, Error: %(exc)s')
        LOG.warning(msg, {'csar_path': csar_path, 'exc': exc_message})
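As a quick illustration of _check_type()'s two matching rules (an exact type name in type_list, or a custom type derived from one), here is a toy custom_def; the dict layout mimics what tosca-parser exposes, which is an assumption of the example:

# Toy demo of _check_type(); 'company.nodes.VNF' matches via derived_from.
custom_def = {
    'company.nodes.VNF': {'derived_from': 'tosca.nodes.nfv.VNF'},
}
matched, type_def = _check_type(custom_def, 'company.nodes.VNF',
                                ['tosca.nodes.nfv.VNF'])
assert matched and type_def == {'derived_from': 'tosca.nodes.nfv.VNF'}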
@@ -136,9 +136,7 @@ class MalformedRequestBody(BadRequest):


 class Invalid(TackerException):
-    def __init__(self, message=None):
-        self.message = message
-        super(Invalid, self).__init__()
+    message = _("Bad Request - Invalid Parameters")


 class InvalidInput(BadRequest):
@@ -219,3 +217,35 @@ class VnfSoftwareImageNotFound(NotFound):

class OrphanedObjectError(TackerException):
    msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')


class CSARFileSizeLimitExceeded(TackerException):
    message = _("The provided CSAR file is too large.")


class VNFPackageURLInvalid(Invalid):
    message = _("Failed to open URL %(url)s")


class InvalidZipFile(Invalid):
    message = _("Invalid zip file : %(path)s")


class UploadFailedToGlanceStore(Invalid):
    message = _("Failed to upload vnf package %(uuid)s to glance store: "
                "%(error)s")


class InvalidCSAR(Invalid):
    message = _("Invalid csar: %(error)s")


class LimitExceeded(TackerException):
    message = _("The request returned a 413 Request Entity Too Large. This "
                "generally means that rate limiting or a quota threshold was "
                "breached.\n\nThe response body:\n%(body)s")

    def __init__(self, *args, **kwargs):
        self.retry_after = (int(kwargs['retry']) if kwargs.get('retry')
                            else None)
        super(LimitExceeded, self).__init__(*args, **kwargs)
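Since TackerException-style classes interpolate their class-level message template from keyword arguments, the new exception classes are raised like this (a sketch; the error text and UUID are illustrative):

# kwargs are interpolated into the class-level message template.
raise exceptions.UploadFailedToGlanceStore(
    uuid='f26f181d-7891-4720-b022-b074ec1733ef',
    error='connection reset by peer')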
@@ -27,6 +27,7 @@ from oslo_utils import excutils

from tacker.common import exceptions
from tacker import context
from tacker.objects import base as objects_base

LOG = logging.getLogger(__name__)

@@ -60,7 +61,8 @@ def init(conf):
        allowed_remote_exmods=exmods)
    NOTIFICATION_TRANSPORT = oslo_messaging.get_notification_transport(
        conf, allowed_remote_exmods=exmods)
-    serializer = RequestContextSerializer()
+    json_serializer = oslo_messaging.JsonPayloadSerializer()
+    serializer = RequestContextSerializer(json_serializer)
    NOTIFIER = oslo_messaging.Notifier(NOTIFICATION_TRANSPORT,
                                       serializer=serializer)

@@ -298,7 +300,8 @@ class Connection(object):
        target = oslo_messaging.Target(
            topic=topic, server=host or cfg.CONF.host, fanout=fanout,
            exchange=exchange)
-        server = get_server(target, endpoints)
+        serializer = objects_base.TackerObjectSerializer()
+        server = get_server(target, endpoints, serializer)
        self.servers.append(server)

    def consume_in_threads(self):
tacker/common/safe_utils.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Utilities and helper functions that won't produce circular imports."""


def get_wrapped_function(function):
    """Get the method at the bottom of a stack of decorators."""
    if not hasattr(function, '__closure__') or not function.__closure__:
        return function

    def _get_wrapped_function(function):
        if not hasattr(function, '__closure__') or not function.__closure__:
            return None

        for closure in function.__closure__:
            func = closure.cell_contents

            deeper_func = _get_wrapped_function(func)
            if deeper_func:
                return deeper_func
            elif hasattr(closure.cell_contents, '__call__'):
                return closure.cell_contents

        return function

    return _get_wrapped_function(function)
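A small self-contained demonstration of how get_wrapped_function() unwinds nested decorators; the decorator here is hypothetical, written only for the demo:

def _inner(x):
    return x + 1

def decorate(func):
    # A bare wrapper that keeps func alive in its closure.
    def wrapper(*args):
        return func(*args)
    return wrapper

wrapped = decorate(decorate(_inner))
# get_wrapped_function walks the wrapper closures back to the original.
assert get_wrapped_function(wrapped) is _inner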
@@ -18,6 +18,8 @@

"""Utilities and helper functions."""

import functools
import inspect
import logging as std_logging
import os
import random

@@ -32,11 +34,18 @@ import netaddr
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
from stevedore import driver
try:
    from eventlet import sleep
except ImportError:
    from time import sleep

from tacker._i18n import _
from tacker.common import constants as q_const
from tacker.common import exceptions
from tacker.common import safe_utils


TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"

@@ -66,6 +75,12 @@ MEM_UNITS = {
}
CONF = cfg.CONF
synchronized = lockutils.synchronized_with_prefix(SYNCHRONIZED_PREFIX)
MAX_COOP_READER_BUFFER_SIZE = 134217728

if hasattr(inspect, 'getfullargspec'):
    getargspec = inspect.getfullargspec
else:
    getargspec = inspect.getargspec


def find_config_file(options, config_file):

@@ -231,3 +246,213 @@ def none_from_string(orig_str):
        return None
    else:
        return orig_str


def expects_func_args(*args):
    def _decorator_checker(dec):
        @functools.wraps(dec)
        def _decorator(f):
            base_f = safe_utils.get_wrapped_function(f)
            argspec = getargspec(base_f)
            if argspec[1] or argspec[2] or set(args) <= set(argspec[0]):
                # NOTE (nirajsingh): We can't really tell if correct stuff
                # will be passed if it's a function with *args or **kwargs so
                # we still carry on and hope for the best
                return dec(f)
            else:
                raise TypeError("Decorated function %(f_name)s does not "
                                "have the arguments expected by the "
                                "decorator %(d_name)s" %
                                {'f_name': base_f.__name__,
                                 'd_name': dec.__name__})

        return _decorator

    return _decorator_checker


def cooperative_iter(iter):
    """Prevent eventlet thread starvation during iteration.

    Return an iterator which schedules after each
    iteration. This can prevent eventlet thread starvation.

    :param iter: an iterator to wrap
    """
    try:
        for chunk in iter:
            sleep(0)
            yield chunk
    except Exception as err:
        with excutils.save_and_reraise_exception():
            msg = _("Error: cooperative_iter exception %s") % err
            LOG.error(msg)


def cooperative_read(fd):
    """Prevent eventlet thread starvation after each read operation.

    Wrap a file descriptor's read with a partial function which schedules
    after each read. This can prevent eventlet thread starvation.

    :param fd: a file descriptor to wrap
    """
    def readfn(*args):
        result = fd.read(*args)
        sleep(0)
        return result
    return readfn


def chunkreadable(iter, chunk_size=65536):
    """Wrap a readable iterator.

    Wrap a readable iterator with a reader yielding chunks of
    a preferred size, otherwise leave iterator unchanged.

    :param iter: an iter which may also be readable
    :param chunk_size: maximum size of chunk
    """
    return chunkiter(iter, chunk_size) if hasattr(iter, 'read') else iter


def chunkiter(fp, chunk_size=65536):
    """Convert a file-like object into an iterator.

    Return an iterator over the file-like object which yields fixed-size
    chunks.

    :param fp: a file-like object
    :param chunk_size: maximum size of chunk
    """
    while True:
        chunk = fp.read(chunk_size)
        if chunk:
            yield chunk
        else:
            break


class CooperativeReader(object):
    """An eventlet thread friendly class for reading in image data.

    When accessing data either through the iterator or the read method
    we perform a sleep to allow a co-operative yield. When there is more than
    one image being uploaded/downloaded this prevents eventlet thread
    starvation, i.e. it allows all threads to be scheduled periodically
    rather than having the same thread be continuously active.
    """
    def __init__(self, fd):
        """Construct a CooperativeReader object.

        :param fd: Underlying image file object
        """
        self.fd = fd
        self.iterator = None
        # NOTE(nirajsingh): if the underlying supports read(), overwrite the
        # default iterator-based implementation with cooperative_read which
        # is more straightforward
        if hasattr(fd, 'read'):
            self.read = cooperative_read(fd)
        else:
            self.iterator = None
            self.buffer = b''
            self.position = 0

    def read(self, length=None):
        """Return the requested amount of bytes.

        Fetching the next chunk of the underlying iterator when needed.
        This is replaced with cooperative_read in __init__ if the underlying
        fd already supports read().
        """
        if length is None:
            if len(self.buffer) - self.position > 0:
                # if no length specified but some data exists in buffer,
                # return that data and clear the buffer
                result = self.buffer[self.position:]
                self.buffer = b''
                self.position = 0
                return bytes(result)
            else:
                # otherwise read the next chunk from the underlying iterator
                # and return it as a whole. Reset the buffer, as subsequent
                # calls may specify the length
                try:
                    if self.iterator is None:
                        self.iterator = self.__iter__()
                    return next(self.iterator)
                except StopIteration:
                    return b''
                finally:
                    self.buffer = b''
                    self.position = 0
        else:
            result = bytearray()
            while len(result) < length:
                if self.position < len(self.buffer):
                    to_read = length - len(result)
                    chunk = self.buffer[self.position:self.position + to_read]
                    result.extend(chunk)

                    # This check is here to prevent potential OOM issues if
                    # this code is called with unreasonably high values of
                    # read size. Currently it is only called from the HTTP
                    # clients of Glance backend stores, which use httplib for
                    # data streaming, which has readsize hardcoded to 8K, so
                    # this check should never fire. Regardless it is still
                    # worth making the check, as the code may be reused
                    # somewhere else.
                    if len(result) >= MAX_COOP_READER_BUFFER_SIZE:
                        raise exceptions.LimitExceeded()
                    self.position += len(chunk)
                else:
                    try:
                        if self.iterator is None:
                            self.iterator = self.__iter__()
                        self.buffer = next(self.iterator)
                        self.position = 0
                    except StopIteration:
                        self.buffer = b''
                        self.position = 0
                        return bytes(result)
            return bytes(result)

    def __iter__(self):
        return cooperative_iter(self.fd.__iter__())


class LimitingReader(object):
    """Reader designed to fail when reading past a configured amount.

    Reader designed to fail when reading image data past the configured
    allowable amount.
    """
    def __init__(self, data, limit,
                 exception_class=exceptions.CSARFileSizeLimitExceeded):
        """Construct a LimitingReader object.

        :param data: Underlying image data object
        :param limit: maximum number of bytes the reader should allow
        :param exception_class: Type of exception to be raised
        """
        self.data = data
        self.limit = limit
        self.bytes_read = 0
        self.exception_class = exception_class

    def __iter__(self):
        for chunk in self.data:
            self.bytes_read += len(chunk)
            if self.bytes_read > self.limit:
                raise self.exception_class()
            else:
                yield chunk

    def read(self, i):
        result = self.data.read(i)
        self.bytes_read += len(result)
        if self.bytes_read > self.limit:
            raise self.exception_class()
        return result
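A minimal sketch of the LimitingReader failure mode, assuming the classes and exceptions above are importable; the 10-byte cap is arbitrary:

import io

data = io.BytesIO(b'x' * 32)
reader = LimitingReader(data, 10)  # cap reads at 10 bytes total
try:
    while reader.read(8):
        pass
except exceptions.CSARFileSizeLimitExceeded:
    print('limit enforced after reading past 10 bytes')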
@@ -13,38 +13,111 @@
# License for the specific language governing permissions and limitations
# under the License.

import datetime
import functools
import inspect
import os
import shutil
import sys

from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import periodic_task
from oslo_service import service
from oslo_utils import excutils
from oslo_utils import timeutils
from sqlalchemy.orm import exc as orm_exc

from tacker.common import csar_utils
from tacker.common import exceptions
from tacker.common import safe_utils
from tacker.common import topics
from tacker.common import utils
import tacker.conf
from tacker import context as t_context
from tacker.db.common_services import common_services_db
from tacker.db.nfvo import nfvo_db
from tacker.extensions import nfvo
from tacker.glance_store import store as glance_store
from tacker import manager
from tacker import objects
from tacker.objects import fields
from tacker.objects.vnf_package import VnfPackagesList
from tacker.plugins.common import constants
from tacker import service as tacker_service
from tacker import version


CONF = tacker.conf.CONF
LOG = logging.getLogger(__name__)


def _delete_csar(context, vnf_package):
    # Delete from glance store
    glance_store.delete_csar(context, vnf_package.id,
                             vnf_package.location_glance_store)

    csar_utils.delete_csar_data(vnf_package.id)


@utils.expects_func_args('vnf_package')
def revert_upload_vnf_package(function):
    """Decorator to revert upload_vnf_package on failure."""

    @functools.wraps(function)
    def decorated_function(self, context, *args, **kwargs):
        try:
            return function(self, context, *args, **kwargs)
        except Exception as exp:
            with excutils.save_and_reraise_exception():
                wrapped_func = safe_utils.get_wrapped_function(function)
                keyed_args = inspect.getcallargs(wrapped_func, self, context,
                                                 *args, **kwargs)
                context = keyed_args['context']
                vnf_package = keyed_args['vnf_package']
                if not isinstance(exp,
                                  exceptions.UploadFailedToGlanceStore):
                    # Delete the csar file from the glance store.
                    glance_store.delete_csar(
                        context, vnf_package.id,
                        vnf_package.location_glance_store)

                    csar_utils.delete_csar_data(vnf_package.id)

                # Delete the vnf_deployment_flavour if created.
                if vnf_package.vnf_deployment_flavours:
                    for flavour in vnf_package.vnf_deployment_flavours:
                        flavour.destroy(context)

                # Set the vnf package onboarding status to created,
                # so that user can retry uploading vnf package
                # after correcting the csar zip file.
                vnf_package.onboarding_state = (
                    fields.PackageOnboardingStateType.CREATED)

                vnf_package.save()

    return decorated_function


class Conductor(manager.Manager):
    def __init__(self, host, conf=None):
        if conf:
            self.conf = conf
        else:
-            self.conf = cfg.CONF
+            self.conf = CONF
        super(Conductor, self).__init__(host=self.conf.host)

    def init_host(self):
        glance_store.initialize_glance_store()
        self._basic_config_check()

    def _basic_config_check(self):
        if not os.path.isdir(CONF.vnf_package.vnf_package_csar_path):
            LOG.error("Config option 'vnf_package_csar_path' is not "
                      "configured correctly. VNF package CSAR path directory"
                      " %s doesn't exist",
                      CONF.vnf_package.vnf_package_csar_path)
            sys.exit(1)

    def update_vim(self, context, vim_id, status):
        t_admin_context = t_context.get_admin_context()
        update_time = timeutils.utcnow()

@@ -67,27 +140,171 @@ class Conductor(manager.Manager):
        t_admin_context.session.add(event_db)
        return status

    def _create_software_images(self, context, sw_image, flavour_uuid):
        vnf_sw_image = objects.VnfSoftwareImage(context=context)
        vnf_sw_image.flavour_uuid = flavour_uuid
        vnf_sw_image.name = sw_image.get('name')

        # TODO(nirajsingh): Provider is mandatory as per SOL005 but it's not
        # a required parameter in SwImageData as per SOL001. SOL001 will be
        # amended to make `provider` a required parameter as per
        # 'https://docbox.etsi.org/ISG/NFV/SOL/05-CONTRIBUTIONS/2019/
        # NFVSOL000338_SOL001ed271_SwImage_Provider.docx'.
        vnf_sw_image.provider = sw_image.get('provider', "")

        vnf_sw_image.version = sw_image.get('version')
        if sw_image.get('checksum'):
            checksum = sw_image.get('checksum')
            if checksum.get('algorithm'):
                vnf_sw_image.algorithm = checksum.get('algorithm')
            if checksum.get('hash'):
                vnf_sw_image.hash = checksum.get('hash')
        vnf_sw_image.container_format = sw_image.get('container_format')
        vnf_sw_image.disk_format = sw_image.get('disk_format')
        if sw_image.get('min_ram'):
            min_ram = sw_image.get('min_ram')
            vnf_sw_image.min_ram = int(min_ram.split()[0])
        else:
            vnf_sw_image.min_ram = 0
        vnf_sw_image.min_disk = int(sw_image.get('min_disk').split()[0])
        vnf_sw_image.size = int(sw_image.get('size').split()[0])
        vnf_sw_image.image_path = ''
        vnf_sw_image.software_image_id = sw_image['software_image_id']
        vnf_sw_image.metadata = sw_image.get('metadata', dict())
        vnf_sw_image.create()

    def _create_flavour(self, context, package_uuid, flavour):
        deploy_flavour = objects.VnfDeploymentFlavour(context=context)
        deploy_flavour.package_uuid = package_uuid
        deploy_flavour.flavour_id = flavour['flavour_id']
        deploy_flavour.flavour_description = flavour['flavour_description']
        deploy_flavour.instantiation_levels = \
            flavour['instantiation_levels']
        deploy_flavour.create()

        sw_images = flavour.get('sw_images')
        if sw_images:
            for sw_image in sw_images:
                self._create_software_images(
                    context, sw_image, deploy_flavour.id)

    def _onboard_vnf_package(self, context, vnf_package, vnf_data, flavours):
        package_vnfd = objects.VnfPackageVnfd(context=context)
        package_vnfd.package_uuid = vnf_package.id

        package_vnfd.vnfd_id = vnf_data.get('descriptor_id')
        package_vnfd.vnf_provider = vnf_data.get('provider')
        package_vnfd.vnf_product_name = vnf_data.get('product_name')
        package_vnfd.vnf_software_version = vnf_data.get('software_version')
        package_vnfd.vnfd_version = vnf_data.get('descriptor_version')
        package_vnfd.create()

        for flavour in flavours:
            self._create_flavour(context, vnf_package.id, flavour)

    @revert_upload_vnf_package
    def upload_vnf_package_content(self, context, vnf_package):
        location = vnf_package.location_glance_store
        zip_path = glance_store.load_csar(vnf_package.id, location)
        vnf_data, flavours = csar_utils.load_csar_data(
            context.elevated(), vnf_package.id, zip_path)
        self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)
        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.ONBOARDED)
        vnf_package.operational_state = (
            fields.PackageOperationalStateType.ENABLED)

        vnf_package.save()

    @revert_upload_vnf_package
    def upload_vnf_package_from_uri(self, context, vnf_package,
                                    address_information, user_name=None,
                                    password=None):

        body = {"address_information": address_information}
        (location, size, checksum, multihash,
         loc_meta) = glance_store.store_csar(context, vnf_package.id, body)

        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.PROCESSING)

        vnf_package.algorithm = CONF.vnf_package.hashing_algorithm
        vnf_package.hash = multihash
        vnf_package.location_glance_store = location

        vnf_package.save()

        zip_path = glance_store.load_csar(vnf_package.id, location)
        vnf_data, flavours = csar_utils.load_csar_data(
            context.elevated(), vnf_package.id, zip_path)

        self._onboard_vnf_package(context, vnf_package, vnf_data, flavours)

        vnf_package.onboarding_state = (
            fields.PackageOnboardingStateType.ONBOARDED)
        vnf_package.operational_state = (
            fields.PackageOperationalStateType.ENABLED)

        vnf_package.save()

    def delete_vnf_package(self, context, vnf_package):
        if (vnf_package.onboarding_state ==
                fields.PackageOnboardingStateType.ONBOARDED):

            _delete_csar(context, vnf_package)

        vnf_package.destroy(context)

    @periodic_task.periodic_task(spacing=CONF.vnf_package_delete_interval)
    def _run_cleanup_vnf_packages(self, context):
        """Delete orphan extracted csar zip and files from extracted path.

        This periodic task will get all deleted packages for the period
        (now - CONF.vnf_package_delete_interval) and delete any left out
        csar zip files and vnf packages files from the extracted path.
        """

        time_duration = datetime.datetime.utcnow() - datetime.timedelta(
            seconds=CONF.vnf_package_delete_interval)
        filters = {'deleted_at': time_duration}
        deleted_vnf_packages = VnfPackagesList.get_by_filters(
            context.elevated(), read_deleted='only', **filters)
        for vnf_pack in deleted_vnf_packages:
            csar_zip_temp_path = os.path.join(
                CONF.vnf_package.vnf_package_csar_path, vnf_pack.id)
            csar_path = os.path.join(
                CONF.vnf_package.vnf_package_csar_path,
                vnf_pack.id + '.zip')
            try:
                if os.path.exists(csar_zip_temp_path):
                    shutil.rmtree(csar_zip_temp_path)
                os.remove(csar_path)
            except OSError:
                LOG.warning("Failed to delete csar zip %(zip)s and"
                            " folder %(folder)s for vnf package %(uuid)s.",
                            {'zip': csar_path, 'folder': csar_zip_temp_path,
                             'uuid': vnf_pack.id})


def init(args, **kwargs):
-    cfg.CONF(args=args, project='tacker',
-             version='%%prog %s' % version.version_info.release_string(),
-             **kwargs)
+    CONF(args=args, project='tacker',
+         version='%%prog %s' % version.version_info.release_string(),
+         **kwargs)

    # FIXME(ihrachys): if import is put in global, circular import
    # failure occurs
    from tacker.common import rpc as n_rpc
-    n_rpc.init(cfg.CONF)
+    n_rpc.init(CONF)


def main(manager='tacker.conductor.conductor_server.Conductor'):
    init(sys.argv[1:])
    objects.register_all()
-    logging.setup(cfg.CONF, "tacker")
+    logging.setup(CONF, "tacker")
    oslo_messaging.set_transport_defaults(control_exchange='tacker')
-    logging.setup(cfg.CONF, "tacker")
-    cfg.CONF.log_opt_values(LOG, logging.DEBUG)
+    logging.setup(CONF, "tacker")
+    CONF.log_opt_values(LOG, logging.DEBUG)
    server = tacker_service.Service.create(
        binary='tacker-conductor',
        topic=topics.TOPIC_CONDUCTOR,
        manager=manager)
-    service.launch(cfg.CONF, server, restart_method='mutate').wait()
+    service.launch(CONF, server, restart_method='mutate').wait()
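The revert_upload_vnf_package decorator above follows a common roll-back-then-re-raise pattern; a stripped-down illustration with hypothetical names, no RPC or glance store involved:

import functools

def revert_on_failure(function):
    @functools.wraps(function)
    def decorated(self, context, vnf_package):
        try:
            return function(self, context, vnf_package)
        except Exception:
            # Roll persistent state back so the user can retry the upload,
            # then let the original error propagate.
            vnf_package.onboarding_state = 'CREATED'
            vnf_package.save()
            raise
    return decorated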
@@ -13,6 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.

import glance_store
from oslo_config import cfg

from tacker.conf import conductor

@@ -23,3 +24,4 @@ CONF = cfg.CONF

vnf_package.register_opts(CONF)
conductor.register_opts(CONF)
glance_store.register_opts(CONF)
tacker/glance_store/__init__.py (new empty file)

tacker/glance_store/store.py (new file, 117 lines)
@@ -0,0 +1,117 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os

import glance_store
from glance_store import exceptions as store_exceptions
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import units
from six.moves import urllib

from tacker.common import exceptions
from tacker.common import utils
import tacker.conf


CONF = tacker.conf.CONF
LOG = logging.getLogger(__name__)


def initialize_glance_store():
    """Initialize glance store."""
    glance_store.create_stores(CONF)
    glance_store.verify_default_store()


def get_csar_data_iter(body):
    url = None
    try:
        if isinstance(body, dict):
            url = body['address_information']
            data_iter = urllib.request.urlopen(url)
        else:
            data_iter = body

        return data_iter
    except Exception:
        LOG.warning("Failed to open csar URL: %s", url)
        raise exceptions.VNFPackageURLInvalid(url=url)


def store_csar(context, package_uuid, body):

    data_iter = get_csar_data_iter(body)

    try:
        # store CSAR file in glance_store
        (location, size, checksum, multihash,
         loc_meta) = glance_store.add_to_backend_with_multihash(
            CONF, package_uuid,
            utils.LimitingReader(
                utils.CooperativeReader(data_iter),
                CONF.vnf_package.csar_file_size_cap * units.Gi),
            0,
            CONF.vnf_package.hashing_algorithm,
            context=context)
    except Exception as e:
        error = encodeutils.exception_to_unicode(e)
        LOG.warning("Failed to store csar data in glance store for "
                    "package %(uuid)s due to error: %(error)s",
                    {"uuid": package_uuid,
                     "error": error})
        raise exceptions.UploadFailedToGlanceStore(uuid=package_uuid,
                                                   error=error)
    finally:
        if hasattr(data_iter, 'close'):
            data_iter.close()

    return location, size, checksum, multihash, loc_meta


def delete_csar(context, package_uuid, location):

    try:
        glance_store.delete_from_backend(location, context)
    except store_exceptions.NotFound:
        LOG.info("Failed to find csar data in glance store for "
                 "package %(uuid)s",
                 {"uuid": package_uuid})


def load_csar(package_uuid, location):
    zip_path = os.path.join(CONF.vnf_package.vnf_package_csar_path,
                            package_uuid + ".zip")

    try:
        resp, size = glance_store.backend.get_from_backend(location)
    except Exception:
        LOG.info("Failed to get csar data from glance store %(location)s for "
                 "package %(uuid)s",
                 {"location": location, "uuid": package_uuid})
        raise

    try:
        with open(zip_path, 'wb') as temp_data:
            for chunk in resp:
                temp_data.write(chunk)
    except Exception as exp:
        LOG.exception("Exception encountered while tee'ing "
                      "csar '%(package_uuid)s' into csar path %(zip_path)s: "
                      "%(error)s. ",
                      {'package_uuid': package_uuid,
                       'zip_path': zip_path,
                       'error': encodeutils.exception_to_unicode(exp)})

    return zip_path
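A hypothetical upload-then-load round trip through this module; the admin context, package UUID, and CSAR path are assumptions of the example:

# Upload a local CSAR into the configured glance store backend...
with open('/tmp/sample_vnf_pkg.zip', 'rb') as csar_file:
    (location, size, checksum,
     multihash, loc_meta) = store_csar(admin_context, package_uuid,
                                       csar_file)

# ...then copy it back out under vnf_package_csar_path for processing.
zip_path = load_csar(package_uuid, location)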
@@ -144,6 +144,7 @@ class TackerObjectSerializer(messaging.NoOpSerializer):
        :returns: A new container of the same type (except set) with
                  items from values having had action applied.
        """
        iterable = values.__class__
        if issubclass(iterable, dict):
            return iterable(**{k: action_fn(context, v)
@@ -36,3 +36,5 @@ LEASE_EVENT_STATUS = 'DONE'
START_LEASE_EVET_TYPE = 'start_lease'
LEASE_CHECK_EVENT_TIMEOUT = 300
LEASE_CHECK_SLEEP_TIME = 3
UUID = 'f26f181d-7891-4720-b022-b074ec1733ef'
INVALID_UUID = 'f181d-7891-4720-b022-b074ec3ef'
BIN tacker/tests/etc/samples/csar_invalid_instantiation_level.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/csar_with_multiple_sw_image_data.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/csar_without_flavour_info.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/csar_without_instantiation_level.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/csar_without_sw_image_data.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/csar_without_vnfd_info.zip (new file, binary not shown)
BIN tacker/tests/etc/samples/sample_vnf_package_csar.zip (new file, binary not shown)
tacker/tests/etc/samples/test_data.zip (new empty file)
@@ -19,9 +19,11 @@ from blazarclient import client as blazar_client
from glanceclient.v2 import client as glance_client
from keystoneauth1.identity import v3
from keystoneauth1 import session
from keystoneclient import adapter
from neutronclient.v2_0 import client as neutron_client
from novaclient import client as nova_client
from oslo_config import cfg
from oslo_serialization import jsonutils
from tempest.lib import base

from tacker.plugins.common import constants as evt_constants
@@ -36,6 +38,40 @@ from tackerclient.v1_0 import client as tacker_client
CONF = cfg.CONF


class SessionClient(adapter.Adapter):
    def request(self, *args, **kwargs):
        kwargs.setdefault('authenticated', False)
        kwargs.setdefault('raise_exc', False)

        content_type = kwargs.pop('content_type', None) or 'application/json'

        headers = kwargs.setdefault('headers', {})
        headers.setdefault('Accept', content_type)

        try:
            kwargs.setdefault('data', kwargs.pop('body'))
        except KeyError:
            pass

        if kwargs.get('data'):
            headers.setdefault('Content-Type', content_type)

        return super(SessionClient, self).request(*args, **kwargs)

    def _decode_json(self, response):
        body = response.text
        if body:
            return jsonutils.loads(body)
        else:
            return ""

    def do_request(self, url, method, **kwargs):
        kwargs.setdefault('authenticated', True)
        resp = self.request(url, method, **kwargs)
        body = self._decode_json(resp)
        return resp, body
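A hedged usage sketch of do_request(), given an auth session built as in get_auth_session() below (endpoint values mirror the functional tests):

# Illustrative only; see the functional tests in this commit for real
# call sites.
client = SessionClient(session=auth_session,
                       service_type='nfv-orchestration',
                       region_name='RegionOne')
resp, body = client.do_request('/vnfpkgm/v1/vnf_packages', 'GET')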
class BaseTackerTest(base.BaseTestCase):
    """Base test case class for all Tacker API tests."""

@@ -50,6 +86,7 @@ class BaseTackerTest(base.BaseTestCase):
                                  **kwargs)

        cls.client = cls.tackerclient()
        cls.http_client = cls.tacker_http_client()
        cls.h_client = cls.heatclient()

    @classmethod
@@ -59,9 +96,10 @@ class BaseTackerTest(base.BaseTestCase):
        return vim_params

    @classmethod
    def tackerclient(cls):
    def get_auth_session(cls):
        vim_params = cls.get_credentials()
        auth = v3.Password(auth_url=vim_params['auth_url'],
        auth = v3.Password(
            auth_url=vim_params['auth_url'],
            username=vim_params['username'],
            password=vim_params['password'],
            project_name=vim_params['project_name'],
@@ -69,7 +107,19 @@ class BaseTackerTest(base.BaseTestCase):
            project_domain_name=vim_params['project_domain_name'])
        verify = 'True' == vim_params.pop('cert_verify', 'False')
        auth_ses = session.Session(auth=auth, verify=verify)
        return tacker_client.Client(session=auth_ses)
        return auth_ses

    @classmethod
    def tacker_http_client(cls):
        auth_session = cls.get_auth_session()
        return SessionClient(session=auth_session,
                             service_type='nfv-orchestration',
                             region_name='RegionOne')

    @classmethod
    def tackerclient(cls):
        auth_session = cls.get_auth_session()
        return tacker_client.Client(session=auth_session)

    @classmethod
    def novaclient(cls):
0    tacker/tests/functional/vnfpkgm/__init__.py (new file)
134  tacker/tests/functional/vnfpkgm/test_vnf_package.py (new file)
@@ -0,0 +1,134 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import time

from oslo_serialization import jsonutils

from tacker.tests.functional import base


class VnfPackageTest(base.BaseTackerTest):

    VNF_PACKAGE_DELETE_TIMEOUT = 120
    VNF_PACKAGE_UPLOAD_TIMEOUT = 300

    def setUp(self):
        super(VnfPackageTest, self).setUp()
        self.base_url = "/vnfpkgm/v1/vnf_packages"

    def _wait_for_delete(self, package_uuid):
        show_url = self.base_url + "/" + package_uuid
        timeout = self.VNF_PACKAGE_DELETE_TIMEOUT
        start_time = int(time.time())
        while True:
            resp, body = self.http_client.do_request(show_url, "GET")
            if resp.status_code == 404:
                break
            if (int(time.time()) - start_time) > timeout:
                raise Exception("Failed to delete package")
            time.sleep(1)

    def _wait_for_onboard(self, package_uuid):
        show_url = self.base_url + "/" + package_uuid
        timeout = self.VNF_PACKAGE_UPLOAD_TIMEOUT
        start_time = int(time.time())
        while True:
            resp, body = self.http_client.do_request(show_url, "GET")
            if body['onboardingState'] == "ONBOARDED":
                break

            if (int(time.time()) - start_time) > timeout:
                raise Exception("Failed to onboard vnf package")

            time.sleep(1)

    def _create_vnf_package(self, body):
        resp, response_body = self.http_client.do_request(self.base_url,
                                                          "POST", body=body)
        self.assertIsNotNone(response_body['id'])
        self.assertEqual(201, resp.status_code)
        return response_body

    def _delete_vnf_package(self, package_uuid):
        url = self.base_url + "/" + package_uuid
        resp, body = self.http_client.do_request(url, "DELETE")
        self.assertEqual(204, resp.status_code)

    def test_create_show_delete_vnf_package(self):
        """Creates and deletes a vnf package."""

        # Create vnf package
        body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})
        vnf_package = self._create_vnf_package(body)
        package_uuid = vnf_package['id']

        # Show vnf package
        show_url = self.base_url + "/" + package_uuid
        resp, body = self.http_client.do_request(show_url, "GET")
        self.assertEqual(200, resp.status_code)

        # Delete vnf package
        self._delete_vnf_package(package_uuid)
        self._wait_for_delete(package_uuid)

        # Show vnf package should fail as it's deleted
        resp, body = self.http_client.do_request(show_url, "GET")
        self.assertEqual(404, resp.status_code)

    def test_list(self):
        vnf_package_list = []
        body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})

        # Create two vnf packages
        vnf_package = self._create_vnf_package(body)
        self.addCleanup(self._delete_vnf_package, vnf_package['id'])
        vnf_package_list.append(vnf_package['id'])

        vnf_package = self._create_vnf_package(body)
        vnf_package_list.append(vnf_package['id'])
        self.addCleanup(self._delete_vnf_package, vnf_package['id'])

        # List vnf packages
        resp, body = self.http_client.do_request(self.base_url, "GET")
        self.assertEqual(200, resp.status_code)

        package_uuids = [obj['id'] for obj in body['vnf_packages']]
        self.assertIn(vnf_package_list[0], package_uuids)
        self.assertIn(vnf_package_list[1], package_uuids)

    def _get_csar_file_path(self, file_name):
        file_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                    '../../etc/samples/' + file_name))
        return file_path

    def test_upload_from_file_and_delete(self):
        body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})
        vnf_package = self._create_vnf_package(body)
        file_path = self._get_csar_file_path("sample_vnf_package_csar.zip")
        # Open in binary mode: the CSAR payload is a zip archive.
        with open(file_path, 'rb') as file_object:
            resp, resp_body = self.http_client.do_request(
                '{base_path}/{id}/package_content'.format(
                    id=vnf_package['id'],
                    base_path=self.base_url),
                "PUT", body=file_object, content_type='application/zip')

        self.assertEqual(202, resp.status_code)

        self._wait_for_onboard(vnf_package['id'])

        self._delete_vnf_package(vnf_package['id'])
        self._wait_for_delete(vnf_package['id'])
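The upload_from_uri variant is only exercised by the unit tests; a hedged sketch of the equivalent functional test (not part of this commit, URL is hypothetical) would look like:

    def test_upload_from_uri(self):
        # Illustrative sketch only.
        body = jsonutils.dumps({"userDefinedData": {"foo": "bar"}})
        vnf_package = self._create_vnf_package(body)
        upload_body = jsonutils.dumps(
            {"addressInformation": "http://example.com/csar.zip"})
        resp, _ = self.http_client.do_request(
            '{base_path}/{id}/package_content/upload_from_uri'.format(
                id=vnf_package['id'], base_path=self.base_url),
            "POST", body=upload_body, content_type='application/json')
        # A 202 is expected, mirroring the PUT package_content flow.
        self.assertEqual(202, resp.status_code)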
164  tacker/tests/unit/common/test_csar_utils.py (new file)
@@ -0,0 +1,164 @@
# Copyright (c) 2019 NTT DATA.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
import os
import shutil
import testtools

from tacker.common import csar_utils
from tacker.common import exceptions
from tacker import context
from tacker.tests import constants


class TestCSARUtils(testtools.TestCase):

    def setUp(self):
        super(TestCSARUtils, self).setUp()
        self.context = context.get_admin_context()
        self.base_path = os.path.dirname(os.path.abspath(__file__))

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data(self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path, "../../etc/samples/sample_vnf_package_csar.zip")
        vnf_data, flavours = csar_utils.load_csar_data(
            self.context, constants.UUID, file_path)
        self.assertEqual(vnf_data['descriptor_version'], '1.0')
        self.assertEqual(vnf_data['vnfm_info'], ['Tacker'])
        self.assertEqual(flavours[0]['flavour_id'], 'simple')
        self.assertIsNotNone(flavours[0]['sw_images'])

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_without_instantiation_level(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_without_instantiation_level.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        msg = ('Policy of type'
               ' "tosca.policies.nfv.InstantiationLevels is not defined.')
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_with_invalid_instantiation_level(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_invalid_instantiation_level.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        levels = ['instantiation_level_1', 'instantiation_level_2']
        msg = ("Level(s) instantiation_level_3 not found in "
               "defined levels %s") % ",".join(sorted(levels))
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_with_invalid_default_instantiation_level(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_with_invalid_"
            "default_instantiation_level.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        levels = ['instantiation_level_1', 'instantiation_level_2']
        msg = ("Level instantiation_level_3 not found in "
               "defined levels %s") % ",".join(sorted(levels))
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_without_vnfd_info(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_without_vnfd_info.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        self.assertEqual("VNF properties are mandatory", exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_with_artifacts_and_without_sw_image_data(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_without_sw_image_data.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        msg = ('Node property "sw_image_data" is missing for artifact'
               ' type tosca.artifacts.nfv.SwImage for node VDU1.')
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_with_multiple_sw_image_data(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/csar_with_multiple_sw_image_data.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        msg = ('artifacts of type "tosca.artifacts.nfv.SwImage"'
               ' is added more than one time for node VDU1.')
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_csar_with_missing_sw_image_data_in_main_template(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path,
            "../../etc/samples/"
            "csar_with_missing_sw_image_data_in_main_template.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        msg = ('Node property "sw_image_data" is missing for artifact'
               ' type tosca.artifacts.nfv.SwImage for node VDU1.')
        self.assertEqual(msg, exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_without_flavour_info(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path, "../../etc/samples/csar_without_flavour_info.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        self.assertEqual("No VNF flavours are available", exc.format_message())

    @mock.patch('tacker.common.csar_utils._extract_csar_zip_file')
    def test_load_csar_data_without_flavour_info_in_main_template(
            self, mock_extract_csar_zip_file):
        file_path = os.path.join(
            self.base_path, "../../etc/samples/"
                            "csar_without_flavour_info_in_main_template.zip")
        exc = self.assertRaises(exceptions.InvalidCSAR,
                                csar_utils.load_csar_data,
                                self.context, constants.UUID, file_path)
        self.assertEqual("No VNF flavours are available",
                         exc.format_message())

    @mock.patch.object(os, 'remove')
    @mock.patch.object(shutil, 'rmtree')
    def test_delete_csar_data(self, mock_rmtree, mock_remove):
        csar_utils.delete_csar_data(constants.UUID)
        mock_rmtree.assert_called()
        mock_remove.assert_called()
@@ -40,3 +40,12 @@ VNF_DATA = {
    'usage_state': 'NOT_IN_USE',
    'user_data': {'abc': 'xyz'}
}

VNF_PACKAGE_DATA = {'algorithm': None, 'hash': None,
                    'location_glance_store': None,
                    'onboarding_state': 'CREATED',
                    'operational_state': 'DISABLED',
                    'tenant_id': uuidsentinel.tenant_id,
                    'usage_state': 'NOT_IN_USE',
                    'user_data': {'abc': 'xyz'}
                    }
135  tacker/tests/unit/conductor/test_conductor_server.py (new file)
@@ -0,0 +1,135 @@
# Copyright (c) 2019 NTT DATA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import os
import shutil
import sys

from tacker.common import csar_utils
from tacker.conductor import conductor_server
from tacker import context
from tacker.glance_store import store as glance_store
from tacker import objects
from tacker.objects import vnf_package
from tacker.tests.unit.conductor import fakes
from tacker.tests.unit.db.base import SqlTestCase
from tacker.tests import uuidsentinel


class TestConductor(SqlTestCase):

    def setUp(self):
        super(TestConductor, self).setUp()
        self.context = context.get_admin_context()
        self.conductor = conductor_server.Conductor('host')
        self.vnf_package = self._create_vnf_package()

    def _create_vnf_package(self):
        vnfpkgm = vnf_package.VnfPackage(context=self.context,
                                         **fakes.VNF_PACKAGE_DATA)
        vnfpkgm.create()
        return vnfpkgm

    @mock.patch.object(conductor_server.Conductor, '_onboard_vnf_package')
    @mock.patch.object(conductor_server, 'revert_upload_vnf_package')
    @mock.patch.object(csar_utils, 'load_csar_data')
    @mock.patch.object(glance_store, 'load_csar')
    def test_upload_vnf_package_content(self, mock_load_csar,
                                        mock_load_csar_data,
                                        mock_revert, mock_onboard):
        mock_load_csar_data.return_value = (mock.ANY, mock.ANY)
        mock_load_csar.return_value = '/var/lib/tacker/5f5d99c6-844a-4c3' \
                                      '1-9e6d-ab21b87dcfff.zip'
        self.conductor.upload_vnf_package_content(
            self.context, self.vnf_package)
        mock_load_csar.assert_called()
        mock_load_csar_data.assert_called()
        mock_onboard.assert_called()

    @mock.patch.object(conductor_server.Conductor, '_onboard_vnf_package')
    @mock.patch.object(glance_store, 'store_csar')
    @mock.patch.object(conductor_server, 'revert_upload_vnf_package')
    @mock.patch.object(csar_utils, 'load_csar_data')
    @mock.patch.object(glance_store, 'load_csar')
    def test_upload_vnf_package_from_uri(self, mock_load_csar,
                                         mock_load_csar_data,
                                         mock_revert, mock_store,
                                         mock_onboard):
        address_information = "http://test.zip"
        mock_load_csar_data.return_value = (mock.ANY, mock.ANY)
        mock_load_csar.return_value = '/var/lib/tacker/5f5d99c6-844a' \
                                      '-4c31-9e6d-ab21b87dcfff.zip'
        mock_store.return_value = 'location', 'size', 'checksum',\
                                  'multihash', 'loc_meta'
        self.conductor.upload_vnf_package_from_uri(self.context,
                                                   self.vnf_package,
                                                   address_information,
                                                   user_name=None,
                                                   password=None)
        mock_load_csar.assert_called()
        mock_load_csar_data.assert_called()
        mock_store.assert_called()
        mock_onboard.assert_called()
        self.assertEqual('multihash', self.vnf_package.hash)
        self.assertEqual('location', self.vnf_package.location_glance_store)

    @mock.patch.object(glance_store, 'delete_csar')
    def test_delete_vnf_package(self, mock_delete_csar):
        self.vnf_package.onboarding_state = 'ONBOARDED'
        self.conductor.delete_vnf_package(self.context, self.vnf_package)
        mock_delete_csar.assert_called()

    @mock.patch.object(os, 'remove')
    @mock.patch.object(shutil, 'rmtree')
    @mock.patch.object(os.path, 'exists')
    @mock.patch.object(vnf_package.VnfPackagesList, 'get_by_filters')
    def test_run_cleanup_vnf_packages(self, mock_get_by_filter,
                                      mock_exists, mock_rmtree,
                                      mock_remove):
        vnf_package_data = {'algorithm': None, 'hash': None,
                            'location_glance_store': None,
                            'onboarding_state': 'CREATED',
                            'operational_state': 'DISABLED',
                            'tenant_id': uuidsentinel.tenant_id,
                            'usage_state': 'NOT_IN_USE',
                            'user_data': {'abc': 'xyz'}
                            }

        vnfpkgm = objects.VnfPackage(context=self.context, **vnf_package_data)
        vnfpkgm.create()
        vnfpkgm.destroy(self.context)

        mock_get_by_filter.return_value = [vnfpkgm]
        mock_exists.return_value = True
        conductor_server.Conductor('host')._run_cleanup_vnf_packages(
            self.context)
        mock_get_by_filter.assert_called()
        mock_rmtree.assert_called()
        mock_remove.assert_called()

    @mock.patch.object(sys, 'exit')
    @mock.patch.object(conductor_server.LOG, 'error')
    @mock.patch.object(glance_store, 'initialize_glance_store')
    @mock.patch.object(os.path, 'isdir')
    def test_init_host(self, mock_isdir, mock_initialize_glance_store,
                       mock_log_error, mock_exit):
        mock_isdir.return_value = False
        self.conductor.init_host()
        mock_log_error.assert_called()
        mock_exit.assert_called_with(1)
        self.assertIn("Config option 'vnf_package_csar_path' is not configured"
                      " correctly. VNF package CSAR path directory %s doesn't"
                      " exist", mock_log_error.call_args[0][0])
94   tacker/tests/unit/db/test_vnf_package.py (new file)
@@ -0,0 +1,94 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tacker.common import exceptions
from tacker import context
from tacker.db.db_sqlalchemy import models
from tacker import objects
from tacker.objects import vnf_package
from tacker.tests.unit.db.base import SqlTestCase
from tacker.tests.unit.objects import fakes as fake_data
from tacker.tests.unit.vnfpkgm import fakes


class TestVnfPackage(SqlTestCase):

    def setUp(self):
        super(TestVnfPackage, self).setUp()
        self.context = context.get_admin_context()
        self.vnf_package = self._create_vnf_package()
        self.vnf_deployment_flavour = self._create_vnf_deployment_flavour()

    def _create_vnf_package(self):
        vnfpkgm = objects.VnfPackage(context=self.context,
                                     **fake_data.vnf_package_data)
        vnfpkgm.create()
        return vnfpkgm

    def _create_vnf_deployment_flavour(self):
        flavour_data = fake_data.vnf_deployment_flavour
        flavour_data.update({'package_uuid': self.vnf_package.id})
        vnf_deployment_flavour = objects.VnfDeploymentFlavour(
            context=self.context, **flavour_data)
        vnf_deployment_flavour.create()
        return vnf_deployment_flavour

    def test_add_user_defined_data(self):
        vnf_package_db = models.VnfPackage()
        vnf_package_db.update(fakes.fake_vnf_package())
        vnf_package_db.save(self.context.session)
        result = vnf_package._add_user_defined_data(
            self.context, vnf_package_db.id, vnf_package_db.user_data)
        self.assertIsNone(result)

    def test_vnf_package_get_by_id(self):
        result = vnf_package._vnf_package_get_by_id(
            self.context, self.vnf_package.id,
            columns_to_join=['vnf_deployment_flavours'])
        self.assertEqual(self.vnf_package.id, result.id)
        self.assertTrue(result.vnf_deployment_flavours)

    def test_vnf_package_create(self):
        result = vnf_package._vnf_package_create(self.context,
                                                 fakes.fake_vnf_package())
        self.assertTrue(result.id)

    def test_vnf_package_list(self):
        result = vnf_package._vnf_package_list(
            self.context, columns_to_join=['vnf_deployment_flavours'])
        self.assertIsInstance(result, list)
        self.assertTrue(result)

    def test_vnf_package_update(self):
        update = {'user_data': {'test': 'xyz'}}
        result = vnf_package._vnf_package_update(
            self.context, self.vnf_package.id, update)
        self.assertEqual({'test': 'xyz'}, result.user_data)

    def test_destroy_vnf_package(self):
        vnf_package._destroy_vnf_package(self.context,
                                         self.vnf_package.id)
        self.assertRaises(
            exceptions.VnfPackageNotFound,
            objects.VnfPackage.get_by_id, self.context,
            self.vnf_package.id)

    def test_make_vnf_packages_list(self):
        response = vnf_package._vnf_package_list(self.context)
        vnf_pack_list_obj = objects.VnfPackagesList(self.context)
        result = vnf_package._make_vnf_packages_list(
            self.context, vnf_pack_list_obj, response, None)
        self.assertIsInstance(result, objects.VnfPackagesList)
        self.assertTrue(result.objects[0].id)
43   tacker/tests/unit/fake_request.py (new file)
@@ -0,0 +1,43 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import webob
import webob.request

from tacker import context
from tacker.tests import uuidsentinel
from tacker import wsgi as os_wsgi


class FakeRequestContext(context.ContextBaseWithSession):
    def __init__(self, *args, **kwargs):
        kwargs['auth_token'] = kwargs.get(
            uuidsentinel.user_id, uuidsentinel.project_id)
        super(FakeRequestContext, self).__init__(*args, **kwargs)


class HTTPRequest(webob.Request):

    @classmethod
    def blank(cls, *args, **kwargs):
        kwargs['base_url'] = 'http://localhost/'
        use_admin_context = kwargs.pop('use_admin_context', True)
        out = os_wsgi.Request.blank(*args, **kwargs)
        out.environ['tacker.context'] = FakeRequestContext(
            uuidsentinel.user_id,
            uuidsentinel.project_id,
            is_admin=use_admin_context)
        return out
@@ -35,12 +35,12 @@ class TestToscaUtils(testtools.TestCase):
    tosca_openwrt = _get_template('test_tosca_openwrt.yaml')
    vnfd_dict = yaml.safe_load(tosca_openwrt)
    toscautils.updateimports(vnfd_dict)
    tosca = tosca_template.ToscaTemplate(parsed_params={}, a_file=False,
                                         yaml_dict_tpl=vnfd_dict)
    tosca_flavor = _get_template('test_tosca_flavor.yaml')

    def setUp(self):
        super(TestToscaUtils, self).setUp()
        self.tosca = tosca_template.ToscaTemplate(
            parsed_params={}, a_file=False, yaml_dict_tpl=self.vnfd_dict)
        self.tosca_flavor = _get_template('test_tosca_flavor.yaml')

    def test_updateimport(self):
        importspath = os.path.abspath('./tacker/tosca/lib/')
0    tacker/tests/unit/vnfpkgm/__init__.py (new file)
126  tacker/tests/unit/vnfpkgm/fakes.py (new file)
@@ -0,0 +1,126 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import datetime
import iso8601
import webob

from tacker.api.vnfpkgm.v1.router import VnfpkgmAPIRouter
from tacker import context
from tacker.db.db_sqlalchemy import models
from tacker.objects import vnf_package as vnf_package_obj
from tacker.tests import constants
from tacker.tests import uuidsentinel
from tacker import wsgi


VNFPACKAGE_RESPONSE = {'_links': {
    'packageContent': {
        'href':
            '/vnfpkgm/v1/vnf_packages/'
            'f26f181d-7891-4720-b022-b074ec1733ef/package_content'},
    'self': {
        'href':
            '/vnfpkgm/v1/vnf_packages/'
            'f26f181d-7891-4720-b022-b074ec1733ef'},
    },
    'id': 'f26f181d-7891-4720-b022-b074ec1733ef',
    'onboardingState': 'CREATED',
    'operationalState': 'DISABLED',
    'usageState': 'NOT_IN_USE',
    'userDefinedData': {'abc': 'xyz'}
}

VNFPACKAGE_INDEX_RESPONSE = {'vnf_packages': [{'_links': {
    'packageContent': {
        'href':
            '/vnfpkgm/v1/vnf_packages/'
            'f26f181d-7891-4720-b022-b074ec1733ef/package_content'},
    'self': {
        'href': '/vnfpkgm/v1/vnf_packages/'
                'f26f181d-7891-4720-b022-b074ec1733ef'}},
    'id': 'f26f181d-7891-4720-b022-b074ec1733ef',
    'onboardingState': 'CREATED',
    'operationalState': 'DISABLED',
    'usageState': 'NOT_IN_USE',
    'userDefinedData': {}}]
}


def fake_vnf_package(**updates):
    vnf_package = {
        'algorithm': None,
        'deleted': False,
        'deleted_at': None,
        'updated_at': None,
        'created_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
                                        tzinfo=iso8601.UTC),
        'hash': None,
        'location_glance_store': None,
        'onboarding_state': 'CREATED',
        'operational_state': 'DISABLED',
        'tenant_id': uuidsentinel.tenant_id,
        'usage_state': 'NOT_IN_USE',
        'user_data': {'abc': 'xyz'},
        'id': constants.UUID,
    }

    if updates:
        vnf_package.update(updates)

    return vnf_package


class InjectContext(wsgi.Middleware):
    """Add a 'tacker.context' to WSGI environ."""

    def __init__(self, context, *args, **kwargs):
        self.context = context
        super(InjectContext, self).__init__(*args, **kwargs)

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        req.environ['tacker.context'] = self.context
        return self.application


def return_vnf_package():
    model_obj = models.VnfPackage()
    model_obj.update(fake_vnf_package())
    return model_obj


def return_vnfpkg_obj():
    vnf_package = vnf_package_obj.VnfPackage._from_db_object(
        context, vnf_package_obj.VnfPackage(),
        return_vnf_package(), expected_attrs=None)
    return vnf_package


def return_vnf_package_list():
    vnf_package = return_vnfpkg_obj()
    return [vnf_package]


def wsgi_app_v1(fake_auth_context=None):
    inner_app_v1 = VnfpkgmAPIRouter()
    if fake_auth_context is not None:
        ctxt = fake_auth_context
    else:
        ctxt = context.ContextBase(uuidsentinel.user_id,
                                   uuidsentinel.project_id, is_admin=True)
    api_v1 = InjectContext(ctxt, inner_app_v1)
    return api_v1
258  tacker/tests/unit/vnfpkgm/test_controller.py (new file)
@@ -0,0 +1,258 @@
# Copyright (C) 2019 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from oslo_serialization import jsonutils
from six.moves import http_client
from six.moves import urllib
from webob import exc

from tacker.api.vnfpkgm.v1 import controller
from tacker.conductor.conductorrpc.vnf_pkgm_rpc import VNFPackageRPCAPI
from tacker.glance_store import store as glance_store
from tacker.objects import vnf_package
from tacker.objects.vnf_package import VnfPackagesList
from tacker.tests import constants
from tacker.tests.unit import base
from tacker.tests.unit import fake_request
from tacker.tests.unit.vnfpkgm import fakes


class TestController(base.TestCase):

    def setUp(self):
        super(TestController, self).setUp()
        self.controller = controller.VnfPkgmController()

    @property
    def app(self):
        return fakes.wsgi_app_v1()

    @mock.patch.object(vnf_package, '_vnf_package_create')
    @mock.patch.object(vnf_package.VnfPackage, '_from_db_object')
    def test_create_with_status_201(self, mock_from_db, mock_vnf_pack):
        body = {'userDefinedData': {'abc': 'xyz'}}
        req = fake_request.HTTPRequest.blank('/vnf_packages')
        req.body = jsonutils.dump_as_bytes(body)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        resp = req.get_response(self.app)
        self.assertEqual(http_client.CREATED, resp.status_code)

    @mock.patch.object(vnf_package, '_vnf_package_create')
    @mock.patch.object(vnf_package.VnfPackage, '_from_db_object')
    def test_create_without_userdefine_data(self, mock_from_db,
                                            mock_vnf_pack):
        body = {'userDefinedData': {}}
        req = fake_request.HTTPRequest.blank('/vnf_packages')
        req.body = jsonutils.dump_as_bytes(body)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        resp = req.get_response(self.app)
        self.assertEqual(http_client.CREATED, resp.status_code)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_show(self, mock_vnf_by_id):
        req = fake_request.HTTPRequest.blank(
            '/vnfpkgm/v1/vnf_packages/%s' % constants.UUID)
        mock_vnf_by_id.return_value = fakes.return_vnf_package()
        expected_result = fakes.VNFPACKAGE_RESPONSE
        res_dict = self.controller.show(req, constants.UUID)
        self.assertEqual(expected_result, res_dict)

    def test_show_with_invalid_uuid(self):
        req = fake_request.HTTPRequest.blank(
            '/vnfpkgm/v1/vnf_packages/%s' % constants.INVALID_UUID)
        self.assertRaises(exc.HTTPNotFound, self.controller.show,
                          req, constants.INVALID_UUID)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_show_no_vnf_package(self, mock_vnf_by_id):
        req = fake_request.HTTPRequest.blank(
            '/vnfpkgm/v1/vnf_packages/%s' % constants.UUID)
        msg = "Can not find requested vnf package: %s" % constants.UUID
        mock_vnf_by_id.side_effect = exc.HTTPNotFound(explanation=msg)
        self.assertRaises(exc.HTTPNotFound, self.controller.show,
                          req, constants.UUID)

    @mock.patch.object(VnfPackagesList, "get_all")
    def test_index(self, mock_vnf_list):
        req = fake_request.HTTPRequest.blank('/vnfpkgm/v1/vnf_packages/')
        mock_vnf_list.return_value = fakes.return_vnf_package_list()
        res_dict = self.controller.index(req)
        expected_result = fakes.VNFPACKAGE_INDEX_RESPONSE
        self.assertEqual(expected_result, res_dict)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    @mock.patch.object(VNFPackageRPCAPI, "delete_vnf_package")
    def test_delete_with_204_status(self, mock_delete_rpc, mock_vnf_by_id):
        mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s' % constants.UUID)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'DELETE'
        resp = req.get_response(self.app)
        self.assertEqual(http_client.NO_CONTENT, resp.status_code)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_delete_no_vnf_package(self, mock_vnf_by_id):
        req = fake_request.HTTPRequest.blank(
            '/vnfpkgm/v1/vnf_packages/%s' % constants.UUID)
        msg = "Can not find requested vnf package: %s" % constants.UUID
        mock_vnf_by_id.side_effect = exc.HTTPNotFound(explanation=msg)
        self.assertRaises(exc.HTTPNotFound, self.controller.delete,
                          req, constants.UUID)

    def test_delete_with_invalid_uuid(self):
        req = fake_request.HTTPRequest.blank(
            '/vnfpkgm/v1/vnf_packages/%s' % constants.INVALID_UUID)
        self.assertRaises(exc.HTTPNotFound, self.controller.delete,
                          req, constants.INVALID_UUID)

    @mock.patch.object(glance_store, 'store_csar')
    @mock.patch.object(VNFPackageRPCAPI, "upload_vnf_package_content")
    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    @mock.patch.object(vnf_package.VnfPackage, "save")
    def test_upload_vnf_package_content(self, mock_vnf_pack_save,
                                        mock_vnf_by_id,
                                        mock_upload_vnf_package_content,
                                        mock_glance_store):
        file_path = "tacker/tests/etc/samples/test_data.zip"
        file_obj = open(file_path, "rb")
        mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
        mock_vnf_pack_save.return_value = fakes.return_vnfpkg_obj()
        mock_glance_store.return_value = 'location', 'size', 'checksum',\
                                         'multihash', 'loc_meta'
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content'
            % constants.UUID)
        req.headers['Content-Type'] = 'application/zip'
        req.method = 'PUT'
        req.body = jsonutils.dump_as_bytes(file_obj)
        resp = req.get_response(self.app)
        mock_glance_store.assert_called()
        self.assertEqual(http_client.ACCEPTED, resp.status_code)

    def test_upload_vnf_package_content_with_invalid_uuid(self):
        file_path = "tacker/tests/etc/samples/test_data.zip"
        file_obj = open(file_path, "rb")
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content'
            % constants.INVALID_UUID)
        exception = self.assertRaises(
            exc.HTTPNotFound, self.controller.upload_vnf_package_content,
            req, constants.INVALID_UUID, body=file_obj)
        self.assertEqual(
            "Can not find requested vnf package: %s" % constants.INVALID_UUID,
            exception.explanation)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_upload_vnf_package_content_without_vnf_pack(self,
                                                         mock_vnf_by_id):
        file_path = "tacker/tests/etc/samples/test_data.zip"
        file_obj = open(file_path, "rb")
        msg = "Can not find requested vnf package: %s" % constants.UUID
        mock_vnf_by_id.side_effect = exc.HTTPNotFound(explanation=msg)
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content' % constants.UUID)
        exception = self.assertRaises(
            exc.HTTPNotFound, self.controller.upload_vnf_package_content,
            req, constants.UUID, body=file_obj)
        self.assertEqual(
            "Can not find requested vnf package: %s" % constants.UUID,
            exception.explanation)

    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_upload_vnf_package_content_with_invalid_status(self,
                                                            mock_vnf_by_id):
        file_path = "tacker/tests/etc/samples/test_data.zip"
        file_obj = open(file_path, "rb")
        vnf_obj = fakes.return_vnfpkg_obj()
        vnf_obj.onboarding_state = 'test'
        mock_vnf_by_id.return_value = vnf_obj
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content' % constants.UUID)
        self.assertRaises(exc.HTTPConflict,
                          self.controller.upload_vnf_package_content,
                          req, constants.UUID, body=file_obj)

    @mock.patch.object(urllib.request, 'urlopen')
    @mock.patch.object(VNFPackageRPCAPI, "upload_vnf_package_from_uri")
    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    @mock.patch.object(vnf_package.VnfPackage, "save")
    def test_upload_vnf_package_from_uri(self, mock_vnf_pack_save,
                                         mock_vnf_by_id,
                                         mock_upload_vnf_package_from_uri,
                                         mock_url_open):
        body = {"addressInformation": "http://test_data.zip"}
        mock_vnf_by_id.return_value = fakes.return_vnfpkg_obj()
        mock_vnf_pack_save.return_value = fakes.return_vnfpkg_obj()
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content/upload_from_uri'
            % constants.UUID)
        req.headers['Content-Type'] = 'application/json'
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes(body)
        resp = req.get_response(self.app)
        self.assertEqual(http_client.ACCEPTED, resp.status_code)

    def test_upload_vnf_package_from_uri_with_invalid_uuid(self):
        body = {"addressInformation": "http://test_data.zip"}
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content/upload_from_uri'
            % constants.INVALID_UUID)
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.upload_vnf_package_from_uri,
                          req, constants.INVALID_UUID, body=body)

    @mock.patch.object(urllib.request, 'urlopen')
    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_upload_vnf_package_from_uri_without_vnf_pack(self,
                                                          mock_vnf_by_id,
                                                          mock_url_open):
        body = {"addressInformation": "http://test_data.zip"}
        msg = "Can not find requested vnf package: %s" % constants.UUID
        mock_vnf_by_id.side_effect = exc.HTTPNotFound(explanation=msg)
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content/upload_from_uri'
            % constants.UUID)
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.upload_vnf_package_from_uri,
                          req, constants.UUID, body=body)

    @mock.patch.object(urllib.request, 'urlopen')
    @mock.patch.object(vnf_package.VnfPackage, "get_by_id")
    def test_upload_vnf_package_from_uri_with_invalid_status(self,
                                                             mock_vnf_by_id,
                                                             mock_url_open):
        body = {"addressInformation": "http://test.zip"}
        vnf_obj = fakes.return_vnfpkg_obj()
        vnf_obj.onboarding_state = 'test'
        mock_vnf_by_id.return_value = vnf_obj
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content/upload_from_uri'
            % constants.UUID)
        self.assertRaises(exc.HTTPConflict,
                          self.controller.upload_vnf_package_from_uri,
                          req, constants.UUID, body=body)

    def test_upload_vnf_package_from_uri_with_invalid_url(self):
        body = {"addressInformation": "http://test_data.zip"}
        req = fake_request.HTTPRequest.blank(
            '/vnf_packages/%s/package_content/upload_from_uri'
            % constants.UUID)
        self.assertRaises(exc.HTTPBadRequest,
                          self.controller.upload_vnf_package_from_uri,
                          req, constants.UUID, body=body)
@@ -17,6 +17,7 @@
Utility methods for working with WSGI servers
"""
from __future__ import print_function
import functools

import errno
import os
@@ -93,6 +94,54 @@ def encode_body(body):
    return encodeutils.to_utf8(body)


def expected_errors(errors):
    """Decorator for Restful API methods which specifies expected exceptions.

    Specify which exceptions may occur when an API method is called. If an
    unexpected exception occurs then return a 500 instead and ask the user
    of the API to file a bug report.
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except Exception as exc:
                if isinstance(exc, webob.exc.WSGIHTTPException):
                    if isinstance(errors, int):
                        t_errors = (errors,)
                    else:
                        t_errors = errors
                    if exc.code in t_errors:
                        raise
                elif isinstance(exc, exception.Forbidden):
                    # Note(nirajsingh): Special case to handle
                    # Forbidden exceptions so every
                    # extension method does not need to wrap authorize
                    # calls. ResourceExceptionHandler silently
                    # converts NotAuthorized to HTTPForbidden
                    raise
                elif isinstance(exc, exception.ValidationError):
                    # Note(nirajsingh): Handle a validation error, which
                    # happens due to invalid API parameters, as an
                    # expected error.
                    raise
                elif isinstance(exc, exception.NotAuthorized):
                    # Handle an authorized exception, will be
                    # automatically converted to a HTTP 401.
                    raise

                LOG.exception("Unexpected exception in API method")
                msg = _('Unexpected API Error. Please report this at '
                        'http://bugs.launchpad.net/tacker/ and attach the '
                        'Tacker API log if possible.\n%s') % type(exc)
                raise webob.exc.HTTPInternalServerError(explanation=msg)

        return wrapped

    return decorator
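As an illustration (a sketch, not code from this commit), a handler would declare the HTTP errors it is allowed to surface like so:

# Hypothetical handler: a raised 404 or 409 passes through unchanged;
# any other exception becomes a 500 with a bug-report message.
@expected_errors((404, 409))
def delete(self, request, id):
    raise webob.exc.HTTPNotFound()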
class WorkerService(common_service.ServiceBase):
    """Wraps a worker to be handled by ProcessLauncher."""
@@ -354,7 +403,7 @@ class Request(webob.Request):
        return bm or 'application/json'

    def get_content_type(self):
        allowed_types = ("application/json")
        allowed_types = ("application/json", "application/zip")
        if "Content-Type" not in self.headers:
            LOG.debug("Missing Content-Type")
            return None
@@ -429,6 +478,7 @@ class ResponseSerializer(object):
    def __init__(self, body_serializers=None, headers_serializer=None):
        self.body_serializers = {
            'application/json': JSONDictSerializer(),
            'application/zip': JSONDictSerializer()
        }
        self.body_serializers.update(body_serializers or {})
@@ -827,6 +877,21 @@ class ResourceExceptionHandler(object):
        return False


def response(code):
    """Attaches response code to a method.

    This decorator associates a response code with a method. Note
    that the function attributes are directly manipulated; the method
    is not wrapped.
    """

    def decorator(func):
        func.wsgi_code = code
        return func

    return decorator
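A minimal sketch of how this decorator is typically applied (hypothetical method name; the dispatch machinery reads func.wsgi_code to pick the success status):

# Hypothetical: mark an upload handler so a successful call returns 202
# instead of the default 200.
@response(202)
def upload_vnf_package_content(self, request, id, body):
    pass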
class ResponseObject(object):
    """Bundles a response object
@@ -1084,6 +1149,15 @@ class Controller(object):

    """

    _view_builder_class = None

    def __init__(self):
        """Initialize controller with a view builder instance."""
        if self._view_builder_class:
            self._view_builder = self._view_builder_class()
        else:
            self._view_builder = None

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Call the method specified in req.environ by RoutesMiddleware."""