Merge "Fix issue of v1 grant-request setting incorrect scope"

This commit is contained in:
Zuul
2024-09-11 14:13:26 +00:00
committed by Gerrit Code Review
6 changed files with 218 additions and 5 deletions

View File

@@ -1182,7 +1182,8 @@ class Conductor(manager.Manager, v2_hook.ConductorV2Hook):
key = policy_dict['type']
placement_constraint.affinity_or_anti_affinity = (
affinity_type[key])
placement_constraint.scope = 'ZONE'
placement_constraint.scope = (
policy_dict['properties']['scope'].upper())
placement_constraint.resource = []
placement_constraint.fallback_best_effort = True
for target in policy_dict.get('targets', []):
@@ -1198,7 +1199,6 @@ class Conductor(manager.Manager, v2_hook.ConductorV2Hook):
p_rsc = \
placement_constraint.resource
p_rsc.append(resource)
break
else:
for vnfc_rsc in vnf_inf.vnfc_resource_info:
if target == vnfc_rsc.vdu_id:
@@ -1208,7 +1208,6 @@ class Conductor(manager.Manager, v2_hook.ConductorV2Hook):
resource.resource_id = vnfc_rsc.id
p_rsc = placement_constraint.resource
p_rsc.append(resource)
break
p_c_list.append(placement_constraint)
placement_obj = models.PlacementConstraint()
placement_obj.id = uuidutils.generate_uuid()

View File

@@ -19,6 +19,7 @@ import fixtures
import iso8601
import json
import os
import re
import requests
import shutil
import sys
@@ -30,6 +31,7 @@ import yaml
from glance_store import exceptions as store_exceptions
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from tacker import auth
@@ -539,6 +541,105 @@ class TestConductor(SqlTestCase, unit_base.FixturedTestCase):
self.assertEqual(mock_send_notification.call_count, 2)
self.assertEqual(mock_change_vnf_status.call_count, 1)
# Regression test: the v1 grant request sent during instantiation must
# carry each placement policy's own 'scope' value from the VNFD
# (e.g. 'ZONE' for the affinity rule, 'NFVI_NODE' for the
# anti-affinity rule) instead of a hard-coded 'ZONE'.
@mock.patch('tacker.conductor.conductor_server.Conductor'
'.send_notification')
@mock.patch('tacker.conductor.conductor_server.Conductor'
'._update_vnf_attributes')
@mock.patch('tacker.conductor.conductor_server.Conductor'
'._change_vnf_status')
@mock.patch('tacker.conductor.conductor_server.Conductor'
'._build_instantiated_vnf_info')
@mock.patch('tacker.conductor.conductor_server.Conductor'
'._check_res_add_remove_rsc')
@mock.patch.object(objects.VnfLcmOpOcc, 'save')
@mock.patch.object(conductor_server.Conductor, '_get_grant_execute')
@mock.patch.object(test_nfvo_client.GrantRequest, 'grants')
@mock.patch.object(coordination.Coordinator, 'get_lock')
@mock.patch('tacker.vnflcm.utils._get_vnfd_dict')
@mock.patch('tacker.vnflcm.utils._convert_desired_capacity')
@mock.patch.object(objects.VnfLcmOpOcc, 'get_by_id')
def test_instantiate_grant_req_with_affinity(self, mock_vnf_by_id,
mock_des, mock_vnfd_dict, mock_get_lock, mock_grants, mock_exec,
mock_save, mock_check, mock_build_info, mock_change_vnf_status,
mock_update_vnf_attributes, mock_send_notification):
# Build a VNF instance backed by an uploaded package so the
# conductor's instantiate path can resolve it from the DB.
lcm_op_occs_data = fakes.get_lcm_op_occs_data()
mock_vnf_by_id.return_value = (
objects.VnfLcmOpOcc(context=self.context, **lcm_op_occs_data))
vnf_package_vnfd = self._create_and_upload_vnf_package()
vnf_instance_data = fake_obj.get_vnf_instance_data(
vnf_package_vnfd.vnfd_id)
vnf_instance = objects.VnfInstance(context=self.context,
**vnf_instance_data)
vnf_instance.create()
instantiate_vnf_req = vnflcm_fakes.get_instantiate_vnf_request_obj()
vnf_lcm_op_occs_id = uuidsentinel.vnf_lcm_op_occs_id
# Select the 'etsi_vnfd_affinity' fixture VNFD, which declares both
# an AffinityRule (scope: zone) and an AntiAffinityRule
# (scope: nfvi_node) placement policy.
vnf_dict = db_utils.get_dummy_vnf_etsi(instance_id=self.instance_uuid,
flavour=instantiate_vnf_req.flavour_id,
vnfd_name='etsi_vnfd_affinity')
vnf_dict['before_error_point'] = fields.ErrorPoint.INITIAL
vnfd_key = f'vnfd_{instantiate_vnf_req.flavour_id}'
vnfd_yaml = vnf_dict['vnfd']['attributes'].get(vnfd_key, '')
mock_vnfd_dict.return_value = yaml.safe_load(vnfd_yaml)
# Canned NFVO grant response: VIM assets plus four addResources
# entries, all placed in the same zone.
vimAssets = {
'computeResourceFlavours': [
{'vimConnectionId': uuidsentinel.vim_id,
'vnfdVirtualComputeDescId': 'CDU1',
'vimFlavourId': 'm1.tiny'}
],
'softwareImages': [
{'vimConnectionId': uuidsentinel.vim_id,
'vnfdSoftwareImageId': 'VDU1',
'vimSoftwareImageId': 'cirros'}
]
}
resAddResource = []
resource = {
'resourceDefinitionId': '2c6e5cc7-240d-4458-a683-1fe648351280',
'vimConnectionId': uuidsentinel.vim_id,
'zoneId': '5e4da3c3-4a55-412a-b624-843921f8b51d'}
resAddResource.append(resource)
resource = {
'resourceDefinitionId': 'faf14707-da7c-4eec-be99-8099fa1e9fa9',
'vimConnectionId': uuidsentinel.vim_id,
'zoneId': '5e4da3c3-4a55-412a-b624-843921f8b51d'}
resAddResource.append(resource)
resource = {
'resourceDefinitionId': 'faf14707-da7c-4eec-be99-8099fa1e9fa0',
'vimConnectionId': uuidsentinel.vim_id,
'zoneId': '5e4da3c3-4a55-412a-b624-843921f8b51d'}
resAddResource.append(resource)
resource = {
'resourceDefinitionId': 'faf14707-da7c-4eec-be99-8099fa1e9fa1',
'vimConnectionId': uuidsentinel.vim_id,
'zoneId': '5e4da3c3-4a55-412a-b624-843921f8b51d'}
resAddResource.append(resource)
grant_dict = {
'id': 'c213e465-8220-487e-9464-f79104e81e96',
'vnfInstanceId': vnf_instance.id,
'vnfLcmOpOccId': vnf_lcm_op_occs_id,
'addResources': resAddResource,
'vimAssets': vimAssets
}
mock_grants.return_value = MockResponse(json_data=grant_dict)
# Capture the conductor's INFO log, which includes the serialized
# grant request JSON; that is where the outgoing
# placementConstraints can be inspected.
log_name = 'tacker.conductor.conductor_server'
with self.assertLogs(logger=log_name, level=logging.INFO) as cm:
self.conductor.instantiate(self.context, vnf_instance, vnf_dict,
instantiate_vnf_req, vnf_lcm_op_occs_id)
self.assertEqual(mock_send_notification.call_count, 2)
self.assertEqual(mock_change_vnf_status.call_count, 1)
# Strip the log prefix so only the raw JSON grant request remains.
chk_log = re.sub(r'INFO:.+:grant start grant_request', '',
cm.output[0]
)
chk_place = json.loads(chk_log)[0]['placementConstraints']
# Each constraint must reflect its own policy's scope rather than
# the previously hard-coded 'ZONE'.
self.assertEqual(chk_place[0]['affinityOrAntiAffinity'], 'AFFINITY')
self.assertEqual(chk_place[0]['scope'], 'ZONE')
self.assertEqual(chk_place[1]['affinityOrAntiAffinity'],
'ANTI_AFFINITY')
self.assertEqual(chk_place[1]['scope'], 'NFVI_NODE')
@mock.patch('tacker.conductor.conductor_server.Conductor'
'._update_vnf_attributes')
@mock.patch('tacker.conductor.conductor_server.Conductor'

View File

@@ -28,6 +28,7 @@ def _get_template(name):
etsi_vnfd = _get_template('etsi_nfv/tosca_vnfd.yaml')
etsi_vnfd_group = _get_template('etsi_nfv/tosca_vnfd_group_member.yaml')
etsi_vnfd_affinity = _get_template('etsi_nfv/tosca_vnfd_affinity.yaml')
hot_scale_grant = _get_template('hot_scale_grant.yaml')
hot_scale_nest_grant = _get_template('hot_scale_nest_grant.yaml')
hot_scale_initial = _get_template('hot_scale_initial.yaml')
@@ -85,6 +86,8 @@ def get_dummy_vnf_etsi(status='PENDING_CREATE', scaling_group=False,
if not vnfd_name:
# Set vnfd including without "tosca.groups.nfv.PlacementGroup"
dummy_vnf['vnfd']['attributes'] = {vnfd_key: etsi_vnfd}
elif vnfd_name == 'etsi_vnfd_affinity':
dummy_vnf['vnfd']['attributes'] = {vnfd_key: etsi_vnfd_affinity}
else:
# Set vnfd including with "tosca.groups.nfv.PlacementGroup"
dummy_vnf['vnfd']['attributes'] = {vnfd_key: etsi_vnfd_group}

View File

@@ -0,0 +1,108 @@
# Test fixture VNFD: a single compute VDU with block storage and one
# connection point, plus two placement policies — an AffinityRule with
# scope 'zone' and an AntiAffinityRule with scope 'nfvi_node' — used to
# verify that the grant request propagates each policy's own scope.
tosca_definitions_version: tosca_simple_yaml_1_2
description: >
Template for test _generate_hot_from_tosca().
imports:
- etsi_nfv_sol001_common_types.yaml
- etsi_nfv_sol001_vnfd_types.yaml
node_types:
topology_template:
node_templates:
# Single compute node (cirros image, 1 vCPU / 512 MiB).
VDU1:
type: tosca.nodes.nfv.Vdu.Compute
properties:
name: VDU1
description: VDU1 compute node
vdu_profile:
min_number_of_instances: 1
max_number_of_instances: 1
sw_image_data:
name: Software of VDU1
version: '0.5.2'
checksum:
algorithm: sha-256
hash: 932fcae93574e242dc3d772d5235061747dfe537668443a1f0567d893614b464
container_format: bare
disk_format: qcow2
min_disk: 1 GiB
size: 1 GiB
artifacts:
sw_image:
type: tosca.artifacts.nfv.SwImage
file: Files/images/cirros-0.5.2-x86_64-disk.img
capabilities:
virtual_compute:
properties:
virtual_memory:
virtual_mem_size: 512 MiB
virtual_cpu:
num_virtual_cpu: 1
virtual_local_storage:
- size_of_storage: 1 GiB
requirements:
- virtual_storage: VB1
# Virtual block storage attached to VDU1.
VB1:
type: tosca.nodes.nfv.Vdu.VirtualBlockStorage
properties:
virtual_block_storage_data:
size_of_storage: 100 GB
rdma_enabled: true
sw_image_data:
name: cirros
version: '0.0.0'
checksum:
algorithm: sha512
hash: f0fd1b50420dce4ca382ccfbb528eef3a38bbeff00b54e95e3876b9bafe7ed2d6f919ca35d9046d437c6d2d8698b1174a335fbd66035bb3edc525d2cdb187232
container_format: bare
disk_format: qcow2
min_disk: 0 B
min_ram: 0 B
size: 13267968 B
# Connection point binding VDU1 to the internal virtual link VL3.
CP1:
type: tosca.nodes.nfv.VduCp
properties:
layer_protocols: [ ipv4 ]
order: 0
requirements:
- virtual_binding: VDU1
- virtual_link: VL3
VL3:
type: tosca.nodes.nfv.VnfVirtualLink
properties:
connectivity_type:
layer_protocols: [ ipv4 ]
description: Internal Virtual link in the VNF
vl_profile:
max_bitrate_requirements:
root: 1048576
leaf: 1048576
min_bitrate_requirements:
root: 1048576
leaf: 1048576
virtual_link_protocol_data:
- associated_layer_protocol: ipv4
l3_protocol_data:
ip_version: ipv4
cidr: 33.33.0.0/24
groups:
VDU1:
type: tosca.groups.nfv.PlacementGroup
members: [ VDU1 ]
# Two placement policies with deliberately different scopes; the
# grant request built from this VNFD must preserve each one.
policies:
- policy_affinity_local_VDU1:
type: tosca.policies.nfv.AffinityRule
targets: [ VDU1 ]
properties:
scope: zone
- policy_antiaffinity_local_VDU1:
type: tosca.policies.nfv.AntiAffinityRule
targets: [ VDU1 ]
properties:
scope: nfvi_node

View File

@@ -2564,7 +2564,7 @@ class TestOpenStack(base.FixturedTestCase):
id='c2947d8a-2c67-4e8f-ad6f-c0889b351c17',
vnf_instance_id=uuidsentinel.vnf_instance_id,
affinity_or_anti_affinity='ANTI_AFFINITY',
scope='ZONE',
scope='NFVI_NODE',
server_group_name='my_compute_placement_policy',
resource=test_res)
placement_obj_list = [placemnt]
@@ -2588,6 +2588,8 @@ class TestOpenStack(base.FixturedTestCase):
vim_connection_info,
del_list)
self.assertEqual(1, len(vnf_info['placement_constraint_list']))
self.assertEqual('NFVI_NODE',
vnf_info['placement_constraint_list'][0].scope)
@mock.patch.object(hc.HeatClient, "resource_get_list")
def test_get_grant_resource_scale_in(self, mock_list):

View File

@@ -2239,7 +2239,7 @@ class OpenStack(abstract_driver.VnfAbstractDriver,
placement.affinity_or_anti_affinity
placement_constraint = objects.PlacementConstraint(
affinity_or_anti_affinity=affinity_or_anti_affinity,
scope='ZONE',
scope=placement.scope,
resource=addRsc,
fallback_best_effort=True)
placement_constraint_list.append(placement_constraint)