Add support cnf auto heal and scale

Support container based VNF AutoHeal and AutoScale operation with
External Monitoring Tools.

Add the Fault Management interfaces and CLI to support AutoHeal.
Add the Performance Management interfaces and CLI to support
AutoScale. The Fault Management and Performance Management
interfaces are based on ETSI NFV-SOL 002 v3.3.1 and ETSI NFV-SOL
003 v3.3.1, which are the Version "2.0.0" API of Tacker. Add the
Prometheus Plugin that has an interface between Tacker and Prometheus
as a sample of an External Monitoring Tool.

Implements: blueprint support-auto-lcm
Change-Id: Ib74305f0b1da4eb8d996ebae400e75902aaa1321
This commit is contained in:
Koji Shimizu 2022-08-24 08:10:22 +00:00
parent 46da7f8f59
commit d1a23a3c28
82 changed files with 8771 additions and 21 deletions

View File

@ -561,7 +561,13 @@
$TACKER_CONF:
v2_vnfm:
kubernetes_vim_rsc_wait_timeout: 800
prometheus_plugin:
fault_management: True
performance_management: True
auto_scaling: True
tox_envlist: dsvm-functional-sol-kubernetes-v2
vars:
prometheus_setup: true
- job:
name: tacker-functional-devstack-kubernetes-oidc-auth

View File

@ -6,6 +6,11 @@ use = egg:Paste#urlmap
/vnflcm: vnflcm_versions
/vnflcm/v1: vnflcm_v1
/vnflcm/v2: vnflcm_v2
/vnffm/v1: vnffm_v1
/vnfpm/v2: vnfpm_v2
/alert/vnf_instances: prometheus_auto_scaling
/alert: prometheus_fm
/pm_event: prometheus_pm
[composite:tackerapi_v1_0]
use = call:tacker.auth:pipeline_factory
@ -27,11 +32,21 @@ use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnflcmaapp_v2
keystone = request_id catch_errors authtoken keystonecontext vnflcmaapp_v2
[composite:vnfpm_v2]
use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnfpmaapp_v2
keystone = request_id catch_errors authtoken keystonecontext vnfpmaapp_v2
[composite:vnflcm_versions]
use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnflcm_api_versions
keystone = request_id catch_errors authtoken keystonecontext vnflcm_api_versions
[composite:vnffm_v1]
use = call:tacker.auth:pipeline_factory
noauth = request_id catch_errors vnffmaapp_v1
keystone = request_id catch_errors authtoken keystonecontext vnffmaapp_v1
[filter:request_id]
paste.filter_factory = oslo_middleware:RequestId.factory
@ -65,5 +80,20 @@ paste.app_factory = tacker.api.vnflcm.v1.router:VnflcmAPIRouter.factory
[app:vnflcmaapp_v2]
paste.app_factory = tacker.sol_refactored.api.router:VnflcmAPIRouterV2.factory
[app:vnfpmaapp_v2]
paste.app_factory = tacker.sol_refactored.api.router:VnfPmAPIRouterV2.factory
[app:vnflcm_api_versions]
paste.app_factory = tacker.sol_refactored.api.router:VnflcmVersions.factory
[app:vnffmaapp_v1]
paste.app_factory = tacker.sol_refactored.api.router:VnffmAPIRouterV1.factory
[app:prometheus_auto_scaling]
paste.app_factory = tacker.sol_refactored.api.prometheus_plugin_router:AutoScalingRouter.factory
[app:prometheus_fm]
paste.app_factory = tacker.sol_refactored.api.prometheus_plugin_router:FmAlertRouter.factory
[app:prometheus_pm]
paste.app_factory = tacker.sol_refactored.api.prometheus_plugin_router:PmEventRouter.factory

View File

@ -6,6 +6,8 @@
- setup-k8s-oidc
- setup-default-vim
- setup-helm
- role: setup-fake-prometheus-server
when: prometheus_setup is defined and prometheus_setup | bool
- role: setup-multi-tenant-vim
when: setup_multi_tenant is defined and setup_multi_tenant | bool
- role: bindep

View File

@ -0,0 +1,17 @@
---
features:
- |
Support container based VNF AutoHeal and AutoScale operation with External
Monitoring Tools.
Add the Fault Management interfaces and CLI to support AutoHeal. AutoHeal
supports two modes, Polling Mode and Notification Mode.
Add the Performance Management interfaces and CLI to support AutoScale.
The Fault Management and Performance Management interfaces are based on
ETSI NFV-SOL 002 v3.3.1 and ETSI NFV-SOL 003 v3.3.1, which are Version
"2.0.0" API of Tacker.
Add the Prometheus Plugin that has an interface between Tacker and
the External Monitoring Tool. The Prometheus Plugin supports data model
conversion from Prometheus format data to SOL based FM/PM schema,
and vice versa.
At the same time, a user guide is also made to help users understand the
function.

View File

@ -0,0 +1,39 @@
- block:
- name: Copy tools/test-setup-fake-prometheus-server.sh
copy:
remote_src=True
src={{ devstack_base_dir }}/tacker/tools/test-setup-fake-prometheus-server.sh
dest={{ zuul_work_dir }}/tools/test-setup-fake-prometheus-server.sh
mode=0755
- name: Get stackenv from devstack environment
slurp:
src: "{{ devstack_base_dir }}/devstack/.stackenv"
register: stackenv
- name: Install docker
package:
name: docker.io
state: present
become: yes
- name: Replace prometheus host ip
replace:
path: "{{ item }}"
regexp: "0.0.0.0"
replace: "{{ hostvars['controller-tacker']['nodepool']['private_ipv4'] }}"
with_items:
- "{{ zuul_work_dir }}/tools/test-setup-fake-prometheus-server.sh"
when:
- p.stat.exists
- name: Run tools/test-setup-fake-prometheus-server.sh
command: tools/test-setup-fake-prometheus-server.sh
args:
chdir: "{{ zuul_work_dir }}"
when:
- p.stat.exists
- p.stat.executable
when:
- inventory_hostname == 'controller-tacker'

View File

@ -1 +1 @@
85c06a0714b7
de8d835ae776

View File

@ -0,0 +1,92 @@
# Copyright 2022 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""add_pm_and_fm_table
Revision ID: de8d835ae776
Revises: 85c06a0714b7
Create Date: 2022-07-21 17:34:45.675428
"""
# flake8: noqa: E402
# revision identifiers, used by Alembic.
revision = 'de8d835ae776'
down_revision = '85c06a0714b7'
from alembic import op
import sqlalchemy as sa
def upgrade(active_plugins=None, options=None):
    """Create the tables backing the v2 FM/PM APIs.

    Adds AlarmV1 and FmSubscriptionV1 (Fault Management) and PmJobV2 and
    PerformanceReportV2 (Performance Management).  Column names follow the
    SOL attribute names (camelCase) rather than snake_case.

    NOTE(review): no downgrade() is defined in this revision — confirm
    that tacker migrations are upgrade-only by convention.
    """
    # FM Alarm resource; enum columns mirror the SOL-defined value sets.
    op.create_table('AlarmV1',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('managedObjectId', sa.String(length=255), nullable=False),
        sa.Column('vnfcInstanceIds', sa.JSON(), nullable=True),
        sa.Column('rootCauseFaultyResource', sa.JSON(), nullable=True),
        sa.Column('alarmRaisedTime', sa.DateTime(), nullable=False),
        sa.Column('alarmChangedTime', sa.DateTime(), nullable=True),
        sa.Column('alarmClearedTime', sa.DateTime(), nullable=True),
        sa.Column('alarmAcknowledgedTime', sa.DateTime(), nullable=True),
        sa.Column('ackState', sa.Enum(
            'UNACKNOWLEDGED', 'ACKNOWLEDGED'), nullable=False),
        sa.Column('perceivedSeverity', sa.Enum(
            'CRITICAL', 'MAJOR', 'MINOR', 'WARNING',
            'INDETERMINATE', 'CLEARED'), nullable=False),
        sa.Column('eventTime', sa.DateTime(), nullable=False),
        sa.Column('eventType', sa.Enum(
            'COMMUNICATIONS_ALARM', 'PROCESSING_ERROR_ALARM',
            'ENVIRONMENTAL_ALARM', 'QOS_ALARM',
            'EQUIPMENT_ALARM'), nullable=False),
        sa.Column('faultType', sa.String(length=255), nullable=True),
        sa.Column('probableCause', sa.String(length=255), nullable=False),
        sa.Column('isRootCause', sa.Boolean(), nullable=False),
        sa.Column('correlatedAlarmIds', sa.JSON(), nullable=True),
        sa.Column('faultDetails', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB'
    )
    # FM subscription; 'filter' and 'authentication' are stored as JSON
    # blobs rather than normalized columns.
    op.create_table('FmSubscriptionV1',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('filter', sa.JSON(), nullable=True),
        sa.Column('callbackUri', sa.String(length=255), nullable=False),
        sa.Column('authentication', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB'
    )
    # PM job; 'reports' accumulates references to generated reports.
    op.create_table('PmJobV2',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('objectType', sa.String(length=32), nullable=False),
        sa.Column('objectInstanceIds', sa.JSON(), nullable=False),
        sa.Column('subObjectInstanceIds', sa.JSON(), nullable=True),
        sa.Column('criteria', sa.JSON(), nullable=False),
        sa.Column('callbackUri', sa.String(length=255), nullable=False),
        sa.Column('reports', sa.JSON(), nullable=True),
        sa.Column('authentication', sa.JSON(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB'
    )
    # PM report produced for a PM job; linked to PmJobV2 via 'jobId'
    # (no FK constraint is declared here).
    op.create_table('PerformanceReportV2',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('jobId', sa.String(length=255), nullable=False),
        sa.Column('entries', sa.JSON(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine='InnoDB'
    )

View File

@ -19,7 +19,9 @@ import itertools
from tacker.policies import base
from tacker.policies import vnf_lcm
from tacker.policies import vnf_package
from tacker.sol_refactored.api.policies import vnffm_v1
from tacker.sol_refactored.api.policies import vnflcm_v2
from tacker.sol_refactored.api.policies import vnfpm_v2
def list_rules():
@ -28,4 +30,6 @@ def list_rules():
vnf_package.list_rules(),
vnf_lcm.list_rules(),
vnflcm_v2.list_rules(),
vnffm_v1.list_rules(),
vnfpm_v2.list_rules(),
)

View File

@ -33,7 +33,23 @@ supported_versions_v2 = {
]
}
supported_fm_versions_v1 = {
'uriPrefix': '/vnffm/v1',
'apiVersions': [
{'version': '1.3.0', 'isDeprecated': False}
]
}
supported_pm_versions_v2 = {
'uriPrefix': '/vnfpm/v2',
'apiVersions': [
{'version': '2.1.0', 'isDeprecated': False}
]
}
CURRENT_VERSION = '2.0.0'
CURRENT_FM_VERSION = '1.3.0'
CURRENT_PM_VERSION = '2.1.0'
v1_versions = [
item['version'] for item in supported_versions_v1['apiVersions']
@ -43,6 +59,14 @@ v2_versions = [
item['version'] for item in supported_versions_v2['apiVersions']
]
v1_fm_versions = [
item['version'] for item in supported_fm_versions_v1['apiVersions']
]
v2_pm_versions = [
item['version'] for item in supported_pm_versions_v2['apiVersions']
]
class APIVersion(object):

View File

@ -0,0 +1,110 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
# Policy name template for the v1 Fault Management API actions.
POLICY_NAME = 'os_nfv_orchestration_api_v2:vnf_fault_monitor:{}'
# '@' allows any authenticated caller (no extra RBAC restriction).
RULE_ANY = '@'

V1_PATH = '/vnffm/v1'
ALARMS_PATH = V1_PATH + '/alarms'
ALARMS_ID_PATH = ALARMS_PATH + '/{alarmId}'
SUBSCRIPTIONS_PATH = V1_PATH + '/subscriptions'
SUBSCRIPTIONS_ID_PATH = SUBSCRIPTIONS_PATH + '/{subscriptionId}'

# Separate policy namespace for the Prometheus Plugin alert endpoint.
POLICY_NAME_PROM_PLUGIN = 'tacker_PROM_PLUGIN_api:PROM_PLUGIN:{}'
PROM_PLUGIN_FM_PATH = '/alert'

# Policy rules for alarms, FM subscriptions and the Prometheus alert
# receiver; all default to RULE_ANY and may be overridden by operators.
rules = [
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('index'),
        check_str=RULE_ANY,
        description="Query FM alarms.",
        operations=[
            {'method': 'GET',
             'path': ALARMS_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('show'),
        check_str=RULE_ANY,
        description="Query an Individual FM alarm.",
        operations=[
            {'method': 'GET',
             'path': ALARMS_ID_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('update'),
        check_str=RULE_ANY,
        description="Modify FM alarm information.",
        operations=[
            {'method': 'PATCH',
             'path': ALARMS_ID_PATH}
        ]
    ),
    # NOTE: add when the operation supported
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('subscription_create'),
        check_str=RULE_ANY,
        description="Create subscription.",
        operations=[
            {'method': 'POST',
             'path': SUBSCRIPTIONS_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('subscription_list'),
        check_str=RULE_ANY,
        description="List subscription.",
        operations=[
            {'method': 'GET',
             'path': SUBSCRIPTIONS_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('subscription_show'),
        check_str=RULE_ANY,
        description="Show subscription.",
        operations=[
            {'method': 'GET',
             'path': SUBSCRIPTIONS_ID_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('subscription_delete'),
        check_str=RULE_ANY,
        description="Delete subscription.",
        operations=[
            {'method': 'DELETE',
             'path': SUBSCRIPTIONS_ID_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME_PROM_PLUGIN.format('alert'),
        check_str=RULE_ANY,
        description="Receive the alert sent from External Monitoring Tool",
        operations=[
            {'method': 'POST',
             'path': PROM_PLUGIN_FM_PATH}
        ]
    )
]


def list_rules():
    """Return the FM policy rules for registration with oslo.policy."""
    return rules

View File

@ -0,0 +1,132 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
# Policy name template for the v2 Performance Management API actions.
POLICY_NAME = 'os_nfv_orchestration_api_v2:vnf_performance_management:{}'
# '@' allows any authenticated caller (no extra RBAC restriction).
RULE_ANY = '@'

V2_PATH = '/vnfpm/v2'
PM_JOB_PATH = V2_PATH + '/pm_jobs'
PM_JOB_ID_PATH = PM_JOB_PATH + '/{pmJobId}'
# NOTE(review): REPORT_GET spells out the prefix instead of reusing
# PM_JOB_ID_PATH, and uses '{id}' where the other paths use '{pmJobId}'
# — consider unifying the placeholder names in the generated docs.
REPORT_GET = '/vnfpm/v2/pm_jobs/{id}/reports/{report_id}'

# Separate policy namespace for the Prometheus Plugin endpoints.
POLICY_NAME_PROM_PLUGIN = 'tacker_PROM_PLUGIN_api:PROM_PLUGIN:{}'
PROM_PLUGIN_PM_PATH = '/pm_event'
PROM_PLUGIN_AUTO_SCALING_PATH = '/alert/vnf_instances'

# Policy rules for PM jobs, PM reports and the Prometheus PM/auto-scaling
# receivers; all default to RULE_ANY and may be overridden by operators.
rules = [
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('create'),
        check_str=RULE_ANY,
        description="Create a PM job.",
        operations=[
            {
                'method': 'POST',
                'path': PM_JOB_PATH
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('index'),
        check_str=RULE_ANY,
        description="Query PM jobs.",
        operations=[
            {
                'method': 'GET',
                'path': PM_JOB_PATH
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('update'),
        check_str=RULE_ANY,
        description="Update a PM job.",
        operations=[
            {
                'method': 'PATCH',
                'path': PM_JOB_ID_PATH
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('show'),
        check_str=RULE_ANY,
        description="Get an individual PM job.",
        operations=[
            {
                'method': 'GET',
                'path': PM_JOB_ID_PATH
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('delete'),
        check_str=RULE_ANY,
        description="Delete a PM job.",
        operations=[
            {
                'method': 'DELETE',
                'path': PM_JOB_ID_PATH
            }
        ]
    ),
    # Add new Rest API GET /vnfpm/v2/pm_jobs/{id}/reports/{report_id} to
    # get the specified PM report.
    policy.DocumentedRuleDefault(
        name=POLICY_NAME.format('report_get'),
        check_str=RULE_ANY,
        description="Get an individual performance report.",
        operations=[
            {
                'method': 'GET',
                'path': REPORT_GET
            }
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME_PROM_PLUGIN.format('pm_event'),
        check_str=RULE_ANY,
        description="Receive the PM event sent from External Monitoring Tool",
        operations=[
            {'method': 'POST',
             'path': PROM_PLUGIN_PM_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME_PROM_PLUGIN.format('auto_scaling'),
        check_str=RULE_ANY,
        description="auto_scaling",
        operations=[
            {'method': 'POST',
             'path': PROM_PLUGIN_AUTO_SCALING_PATH}
        ]
    ),
    policy.DocumentedRuleDefault(
        name=POLICY_NAME_PROM_PLUGIN.format('auto_scaling_id'),
        check_str=RULE_ANY,
        description="auto_scaling_id",
        operations=[
            {'method': 'POST',
             'path': PROM_PLUGIN_AUTO_SCALING_PATH + '/{vnfInstanceId}'}
        ]
    )
]


def list_rules():
    """Return the PM policy rules for registration with oslo.policy."""
    return rules

View File

@ -0,0 +1,43 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.api.policies import vnffm_v1 as vnffm_policy_v1
from tacker.sol_refactored.api.policies import vnfpm_v2 as vnfpm_policy_v2
from tacker.sol_refactored.api import prometheus_plugin_wsgi as prom_wsgi
from tacker.sol_refactored.controller import prometheus_plugin_controller
class PmEventRouter(prom_wsgi.PrometheusPluginAPIRouter):
    """Router for the /pm_event endpoint (PM data from the monitoring tool)."""

    controller = prom_wsgi.PrometheusPluginResource(
        prometheus_plugin_controller.PmEventController(),
        policy_name=vnfpm_policy_v2.POLICY_NAME_PROM_PLUGIN)
    # Single route: POST on the mount point dispatches to 'pm_event'.
    route_list = [("", {"POST": "pm_event"})]
class FmAlertRouter(prom_wsgi.PrometheusPluginAPIRouter):
    """Router for the /alert endpoint (FM alerts from the monitoring tool)."""

    controller = prom_wsgi.PrometheusPluginResource(
        prometheus_plugin_controller.FmAlertController(),
        policy_name=vnffm_policy_v1.POLICY_NAME_PROM_PLUGIN)
    # Single route: POST on the mount point dispatches to 'alert'.
    route_list = [("", {"POST": "alert"})]
class AutoScalingRouter(prom_wsgi.PrometheusPluginAPIRouter):
    """Router for the /alert/vnf_instances auto-scaling endpoints."""

    controller = prom_wsgi.PrometheusPluginResource(
        prometheus_plugin_controller.AutoScalingController(),
        policy_name=vnfpm_policy_v2.POLICY_NAME_PROM_PLUGIN)
    # Both the bare path and the per-instance path ('/{id}') dispatch to
    # the same 'auto_scaling' action; the id is passed through as a kwarg.
    route_list = [
        ("", {"POST": "auto_scaling"}),
        ("/{id}", {"POST": "auto_scaling"})
    ]

View File

@ -0,0 +1,46 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from tacker.api.validation import validators
from tacker.common import exceptions as tacker_ex
from tacker.sol_refactored.common import exceptions as sol_ex
class PrometheusPluginSchemaValidator(validators._SchemaValidator):
    """Schema validator raising Prometheus-Plugin specific errors.

    Reuses the common API schema validator, but converts its
    ValidationError into PrometheusPluginValidationError so the
    prometheus plugin WSGI layer can map it to an error response.
    """

    def validate(self, *args, **kwargs):
        try:
            super(PrometheusPluginSchemaValidator, self).validate(
                *args, **kwargs)
        except tacker_ex.ValidationError as ex:
            # Re-raise in the sol_refactored exception hierarchy.
            raise sol_ex.PrometheusPluginValidationError(detail=str(ex))
def schema(request_body_schema):
    """Decorator factory validating the 'body' kwarg against a JSON schema.

    The decorated controller method must be called with a ``body``
    keyword argument; a missing body or a body that fails validation
    raises PrometheusPluginValidationError.
    """
    def add_validator(func):
        @functools.wraps(func)
        def decorated(*args, **kwargs):
            if 'body' in kwargs:
                # Build the validator per call and check the payload
                # before entering the controller method.
                PrometheusPluginSchemaValidator(
                    request_body_schema).validate(kwargs['body'])
                return func(*args, **kwargs)
            raise sol_ex.PrometheusPluginValidationError(
                detail="body is missing.")
        return decorated
    return add_validator

View File

@ -0,0 +1,68 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.api import wsgi as sol_wsgi
import webob
LOG = logging.getLogger(__name__)
class PrometheusPluginResponse(sol_wsgi.SolResponse):
    """Response for prometheus plugin endpoints.

    Unlike the base SolResponse, only 'content_type' may be forwarded
    from the keyword arguments into the response headers.
    """
    allowed_headers = ['content_type']

    def __init__(self, status, body, **kwargs):
        self.status = status
        self.body = body
        # Keep only the whitelisted headers that were actually supplied.
        self.headers = {name: kwargs[name]
                        for name in self.allowed_headers
                        if name in kwargs}
class PrometheusPluginErrorResponse(sol_wsgi.SolErrorResponse):
    """Error response; serialization is inherited from SolErrorResponse."""
    pass
class PrometheusPluginResource(sol_wsgi.SolResource):
    """WSGI resource for prometheus plugin endpoints.

    Follows the same deserialize -> policy check -> dispatch -> serialize
    flow as the base SolResource, but wraps failures in
    PrometheusPluginErrorResponse instead of the SOL error response.
    """

    @webob.dec.wsgify(RequestClass=sol_wsgi.SolRequest)
    def __call__(self, request):
        LOG.info("%(method)s %(url)s", {"method": request.method,
                                        "url": request.url})
        try:
            action, args, accept = self._deserialize_request(request)
            self._check_policy(request, action)
            result = self._dispatch(request, action, args)
            response = result.serialize(accept)
        except Exception as ex:
            # Any failure above is converted to a problem+json error
            # response; if even that serialization fails, fall back to a
            # plain 400.
            result = PrometheusPluginErrorResponse(ex, request)
            try:
                response = result.serialize('application/problem+json')
            except Exception:
                LOG.exception("Unknown error")
                return webob.exc.HTTPBadRequest(explanation="Unknown error")
        LOG.info("%(url)s returned with HTTP %(status)d",
                 {"url": request.url, "status": response.status_int})
        return response
class PrometheusPluginAPIRouter(sol_wsgi.SolAPIRouter):
    """Router base for prometheus plugin apps; behavior is inherited."""
    pass
class PrometheusPluginAPIController(sol_wsgi.SolAPIController):
    """Controller base for prometheus plugin apps; behavior is inherited."""
    pass

View File

@ -14,10 +14,14 @@
# under the License.
from tacker.sol_refactored.api.policies import vnffm_v1 as vnffm_policy_v1
from tacker.sol_refactored.api.policies import vnflcm_v2 as vnflcm_policy_v2
from tacker.sol_refactored.api.policies import vnfpm_v2 as vnfpm_policy_v2
from tacker.sol_refactored.api import wsgi as sol_wsgi
from tacker.sol_refactored.controller import vnffm_v1
from tacker.sol_refactored.controller import vnflcm_v2
from tacker.sol_refactored.controller import vnflcm_versions
from tacker.sol_refactored.controller import vnfpm_v2
class VnflcmVersions(sol_wsgi.SolAPIRouter):
@ -57,3 +61,27 @@ class VnflcmAPIRouterV2(sol_wsgi.SolAPIRouter):
("/vnf_lcm_op_occs/{id}", {"GET": "lcm_op_occ_show",
"DELETE": "lcm_op_occ_delete"})
]
class VnffmAPIRouterV1(sol_wsgi.SolAPIRouter):
controller = sol_wsgi.SolResource(vnffm_v1.VnfFmControllerV1(),
policy_name=vnffm_policy_v1.POLICY_NAME)
route_list = [
("/alarms", {"GET": "index"}),
("/alarms/{id}", {"GET": "show", "PATCH": "update"}),
("/subscriptions", {"GET": "subscription_list",
"POST": "subscription_create"}),
("/subscriptions/{id}", {"GET": "subscription_show",
"DELETE": "subscription_delete"})
]
class VnfPmAPIRouterV2(sol_wsgi.SolAPIRouter):
controller = sol_wsgi.SolResource(vnfpm_v2.VnfPmControllerV2(),
policy_name=vnfpm_policy_v2.POLICY_NAME)
route_list = [
("/pm_jobs", {"POST": "create", "GET": "index"}),
("/pm_jobs/{id}", {
"PATCH": "update", "GET": "show", "DELETE": "delete"}),
("/pm_jobs/{id}/reports/{report_id}", {"GET": "report_get"}),
]

View File

@ -16,7 +16,6 @@
from tacker.api.validation import parameter_types
# SOL013 7.2.2
Identifier = {
'type': 'string', 'minLength': 1, 'maxLength': 255
@ -123,6 +122,40 @@ _IpAddresses = {
'additionalProperties': True
}
# SOL013 8.3.4
SubscriptionAuthentication = {
'type': 'object',
'properties': {
'authType': {
'type': 'array',
'items': {
'type': 'string',
'enum': [
'BASIC',
'OAUTH2_CLIENT_CREDENTIALS',
'TLS_CERT']
}
},
'paramsBasic': {
'type': 'object',
'properties': {
'userName': {'type': 'string'},
'password': {'type': 'string'}
}
},
'paramsOauth2ClientCredentials': {
'type': 'object',
'properties': {
'clientId': {'type': 'string'},
'clientPassword': {'type': 'string'},
'tokenEndpoint': {'type': 'string'}
}
}
},
'required': ['authType'],
'additionalProperties': True,
}
# SOL003 4.4.1.10c
IpOverEthernetAddressData = {
'type': 'object',

View File

@ -0,0 +1,87 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Schema of one alert entry in the notification POSTed by the External
# Monitoring Tool.
# NOTE: a stray top-level 'status' member (a duplicate of
# properties['status']) was removed; it was not a JSON Schema keyword
# and had no validation effect.
Alert = {
    'type': 'object',
    'properties': {
        'status': {
            'type': 'string',
            'enum': ['firing', 'resolved']
        },
        'labels': {
            'type': 'object',
            'properties': {
                'receiver_type': {
                    'type': 'string',
                    'enum': ['tacker']
                },
                'function_type': {
                    'type': 'string',
                    'enum': ['vnffm', 'vnfpm', 'auto_scale']
                },
                'job_id': {'type': 'string'},
                'object_instance_id': {'type': 'string'},
                'vnf_instance_id': {'type': 'string'},
                'node': {'type': 'string'},
                'perceived_severity': {
                    'type': 'string',
                    'enum': ['CRITICAL', 'MAJOR', 'MINOR', 'WARNING',
                             'INDETERMINATE', 'CLEARED']
                },
                'event_type': {'type': 'string'},
                'auto_scale_type': {
                    'type': 'string',
                    'enum': ['SCALE_OUT', 'SCALE_IN']
                },
                'aspect_id': {'type': 'string'}
            },
            # Routing labels are mandatory; everything else depends on
            # the function_type.
            'required': ['receiver_type', 'function_type'],
            'additionalProperties': True
        },
        'annotations': {
            'type': 'object',
            'properties': {
                'value': {'type': 'number'},
                'probable_cause': {'type': 'string'},
                'fault_type': {'type': 'string'},
                'fault_details': {'type': 'string'}
            },
            'required': [],
            'additionalProperties': True
        },
        'startsAt': {'type': 'string'},
        'endsAt': {'type': 'string'},
        'fingerprint': {'type': 'string'}
    },
    # 'endsAt' is intentionally not required.
    'required': ['status', 'labels', 'annotations', 'startsAt',
                 'fingerprint'],
    'additionalProperties': True
}

# Schema of the whole request body: a list of Alert objects.
AlertMessage = {
    'type': 'object',
    'properties': {
        'alerts': {
            'type': 'array',
            'items': Alert
        }
    },
    'required': ['alerts']
}

View File

@ -0,0 +1,173 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.api.schemas import common_types
# SOL003 7.5.2.8
# Request body for PATCH /vnffm/v1/alarms/{alarmId}; only the
# acknowledgement state of an alarm may be modified.
AlarmModifications_V130 = {
    'type': 'object',
    'properties': {
        'ackState': {
            'type': 'string',
            'enum': ['ACKNOWLEDGED', 'UNACKNOWLEDGED']
        }
    },
    'required': ['ackState'],
    'additionalProperties': True,
}
# SOL003 4.4.1.5 inner
_VnfProductVersions = {
'type': 'array',
'items': {
'type': 'objects',
'properties': {
'vnfSoftwareVersion': {'type': 'string'},
'vnfdVersions': {
'type': 'array',
'items': {'type': 'string'}
}
},
'required': ['vnfSoftwareVersion'],
'additionalProperties': True,
}
}
# SOL003 4.4.1.5 inner
_VnfProducts = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'vnfProductName': {'type': 'string'},
'versions': _VnfProductVersions
},
'required': ['vnfProductName'],
'additionalProperties': True,
}
}
# SOL003 4.4.1.5 inner
_VnfProductsFromProviders = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'type': 'object',
'properties': {
'vnfProvider': {'type': 'string'},
'vnfProducts': _VnfProducts
}
},
'required': ['vnfProvider'],
'additionalProperties': True,
}
}
# SOL003 4.4.1.5
# Filter selecting VNF instances by VNFD id, product/provider, instance
# id or instance name.
_VnfInstanceSubscriptionFilter = {
    'type': 'object',
    'properties': {
        'vnfdIds': {
            'type': 'array',
            'items': common_types.Identifier
        },
        'vnfProductsFromProviders': _VnfProductsFromProviders,
        'vnfInstanceIds': {
            'type': 'array',
            'items': common_types.Identifier
        },
        'vnfInstanceNames': {
            'type': 'array',
            'items': {'type': 'string'}
        }
    },
    'additionalProperties': True,
}

# SOL003 7.5.3.2
# Filter restricting which FM notifications a subscription receives;
# all members are optional and combine conjunctively.
_FmNotificationsFilter = {
    'type': 'object',
    'properties': {
        'vnfInstanceSubscriptionFilter': _VnfInstanceSubscriptionFilter,
        'notificationTypes': {
            'type': 'array',
            'items': {
                'type': 'string',
                'enum': [
                    'AlarmNotification',
                    'AlarmClearedNotification',
                    'AlarmListRebuiltNotification']
            }
        },
        'faultyResourceTypes': {
            'type': 'array',
            'items': {
                'type': 'string',
                'enum': [
                    'COMPUTE',
                    'STORAGE',
                    'NETWORK']
            }
        },
        'perceivedSeverities': {
            'type': 'array',
            'items': {
                'type': 'string',
                'enum': [
                    'CRITICAL',
                    'MAJOR',
                    'MINOR',
                    'WARNING',
                    'INDETERMINATE',
                    'CLEARED']
            }
        },
        'eventTypes': {
            'type': 'array',
            'items': {
                'type': 'string',
                'enum': [
                    'COMMUNICATIONS_ALARM',
                    'PROCESSING_ERROR_ALARM',
                    'ENVIRONMENTAL_ALARM',
                    'QOS_ALARM',
                    'EQUIPMENT_ALARM']
            }
        },
        'probableCauses': {
            'type': 'array',
            'items': {'type': 'string'}
        }
    },
    'additionalProperties': True,
}

# SOL003 7.5.2.2
# Request body for POST /vnffm/v1/subscriptions.
FmSubscriptionRequest_V130 = {
    'type': 'object',
    'properties': {
        'filter': _FmNotificationsFilter,
        'callbackUri': {'type': 'string', 'maxLength': 255},
        'authentication': common_types.SubscriptionAuthentication,
        'verbosity': {
            'type': 'string',
            'enum': ['FULL', 'SHORT']
        }
    },
    'required': ['callbackUri'],
    'additionalProperties': True,
}

View File

@ -0,0 +1,86 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.api.schemas import common_types
# SOL003 6.5.3.3
_PmJobCriteria_V210 = {
'type': 'object',
'properties': {
'performanceMetric': {
'type': 'array',
'items': {'type:': 'string'}
},
'performanceMetricGroup': {
'type': 'array',
'items': {'type:': 'string'}
},
'collectionPeriod': {'type': 'integer'},
'reportingPeriod': {'type': 'integer'},
'reportingBoundary': {'type': 'string'}
},
'anyOf': [
{'required': ['performanceMetric']},
{'required': ['performanceMetricGroup']}
],
'required': ['collectionPeriod', 'reportingPeriod'],
'additionalProperties': True,
}
# SOL003 6.5.2.6
# Request body for POST /vnfpm/v2/pm_jobs.
CreatePmJobRequest_V210 = {
    'type': 'object',
    'properties': {
        'objectType': {
            'type': 'string',
            'enum': [
                # TODO(YiFeng): Currently, this API only supports CNF, and
                # supports the following types. When VNF is supported,
                # the types can be extended.
                'Vnf',
                'Vnfc',
                'VnfIntCP',
                'VnfExtCP']
        },
        'objectInstanceIds': {
            'type': 'array',
            'items': common_types.Identifier
        },
        'subObjectInstanceIds': {
            'type': 'array',
            'items': common_types.IdentifierInVnf
        },
        'criteria': _PmJobCriteria_V210,
        'callbackUri': {'type': 'string'},
        'authentication': common_types.SubscriptionAuthentication,
    },
    'required': ['objectType', 'objectInstanceIds', 'criteria', 'callbackUri'],
    'additionalProperties': True,
}

# SOL003 6.5.2.12
# Request body for PATCH /vnfpm/v2/pm_jobs/{pmJobId}; at least one of
# the two members must be present.
PmJobModificationsRequest_V210 = {
    'type': 'object',
    'properties': {
        'callbackUri': {'type': 'string'},
        'authentication': common_types.SubscriptionAuthentication
    },
    'anyOf': [
        {'required': ['callbackUri']},
        {'required': ['authentication']}
    ],
    'required': [],
    'additionalProperties': True,
}

View File

@ -133,6 +133,10 @@ class SolResource(object):
response = result.serialize(accept)
except Exception as ex:
result = SolErrorResponse(ex, request.best_match_language())
if type(self.controller).__name__ == 'VnfFmControllerV1':
result.headers['version'] = api_version.CURRENT_FM_VERSION
if type(self.controller).__name__ == 'VnfPmControllerV2':
result.headers['version'] = api_version.CURRENT_PM_VERSION
try:
response = result.serialize('application/problem+json')
except Exception:

View File

@ -56,6 +56,14 @@ VNFM_OPTS = [
'connection error when sending a notification. '
'Period between retries is exponential starting '
'0.5 seconds up to a maximum of 60 seconds.')),
cfg.IntOpt('vnffm_alarm_page_size',
default=0, # 0 means no paging
help=_('Paged response size of the query result '
'for VNF Fault Management alarm.')),
cfg.IntOpt('vnfpm_pmjob_page_size',
default=0, # 0 means no paging
help=_('Paged response size of the query result for '
'VNF PM job.')),
# NOTE: This is for test use since it is convenient to be able to delete
# under development.
cfg.BoolOpt('test_enable_lcm_op_occ_delete',
@ -104,7 +112,28 @@ NFVO_OPTS = [
CONF.register_opts(NFVO_OPTS, 'v2_nfvo')
# Options controlling the Prometheus external-monitoring-tool plugin.
# All features default to disabled.
PROMETHEUS_PLUGIN_OPTS = [
    cfg.BoolOpt('performance_management',
                default=False,
                help=_('Enable prometheus plugin performance management')),
    cfg.IntOpt('reporting_period_margin',
               default=1,
               # Typo fix: "PM jos's" -> "PM job's".
               help=_('Some margin time for PM job\'s reportingPeriod')),
    cfg.BoolOpt('fault_management',
                default=False,
                help=_('Enable prometheus plugin fault management')),
    cfg.BoolOpt('auto_scaling',
                default=False,
                help=_('Enable prometheus plugin autoscaling')),
]
CONF.register_opts(PROMETHEUS_PLUGIN_OPTS, 'prometheus_plugin')
def config_opts():
return [('v2_nfvo', NFVO_OPTS),
('v2_vnfm', VNFM_OPTS)]
('v2_vnfm', VNFM_OPTS),
('prometheus_plugin', PROMETHEUS_PLUGIN_OPTS)]

View File

@ -67,3 +67,36 @@ def lock_vnf_instance(inst_arg, delay=False):
return wrapper
return operation_lock
def lock_resources(res_arg, delay=False):
    """Decorator factory serializing operations on a shared resource.

    :param res_arg: format string expanded with the wrapped function's
        bound arguments to obtain the lock name (e.g. "{inst_id}").
    :param delay: if True, wait up to 10 seconds for the lock instead of
        failing immediately.
    :raises ResourcesOtherOperationInProgress: if the lock is held.
    """
    def operation_lock(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            coord = coordination.COORDINATOR
            # ensure coordination start
            # NOTE: it is noop if already started.
            coord.start()
            # Resolve the lock id from the call's actual arguments.
            sig = inspect.signature(func)
            call_args = sig.bind(*args, **kwargs).arguments
            res_id = res_arg.format(**call_args)
            lock = coord.get_lock(res_id)
            blocking = False if not delay else 10
            # NOTE: 'with lock' is not used since it can't handle
            # lock failed exception well.
            if not lock.acquire(blocking=blocking):
                LOG.debug("Locking resources %s failed.", res_id)
                raise sol_ex.ResourcesOtherOperationInProgress(inst_id=res_id)
            try:
                LOG.debug("resources %s locked.", res_id)
                return func(*args, **kwargs)
            finally:
                lock.release()
        return wrapper
    return operation_lock

View File

@ -92,6 +92,11 @@ class SolHttpError422(SolException):
title = 'Unprocessable Entity'
class SolHttpError503(SolException):
    # 503 base class; used when an external monitoring tool is unreachable.
    status = 503
    title = 'Service Unavailable'
class MethodNotAllowed(SolHttpError405):
    # %(method)s: the rejected HTTP method.
    message = _("Method %(method)s is not supported.")
@ -372,3 +377,53 @@ class HelmOperationFailed(SolHttpError422):
class HelmParameterNotFound(SolHttpError400):
message = _("Helm parameter for scale vdu %(vdu_name)s is not found.")
class AlarmNotFound(SolHttpError404):
    # FM: alarm id in the request URI does not exist.
    message = _("Alarm %(alarm_id)s not found.")
class AckStateInvalid(SolHttpError409):
    # FM: ack-state modification specified the alarm's current state.
    message = _("The ackState of alarm cannot specify the same value.")
class FmSubscriptionNotFound(SolHttpError404):
    message = _("FmSubscription %(subsc_id)s not found.")
class PMJobNotExist(SolHttpError404):
    message = _("The specified PM job does not exist.")
class PMReportNotExist(SolHttpError404):
    message = _("The specified Performance Report does not exist.")
class PMJobInvalidRequest(SolHttpError400):
    message = _("Invalid request")
class ResourcesOtherOperationInProgress(SolHttpError409):
    # Raised by the lock_resources decorator on lock contention.
    message = _("Other LCM operation of resources %(inst_id)s "
                "is in progress.")
# prometheus plugin
class PrometheusPluginNotEnabled(SolHttpError404):
    # The corresponding feature flag in [prometheus_plugin] is off.
    message = _("%(name)s API is not enabled.")
class PrometheusPluginError(Exception):
    # Internal (non-HTTP) error while processing an alert; logged by caller.
    pass
class PrometheusPluginSkipped(Exception):
    # Flow-control: the alert does not apply to the job and is ignored.
    pass
class PrometheusPluginValidationError(SolValidationError):
    pass
class PrometheusSettingFailed(SolHttpError503):
    message = _("Setting PM job on External Monitoring Tool failed.")

View File

@ -0,0 +1,86 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
from oslo_log import log as logging
from oslo_utils import uuidutils
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_subscription_utils as subsc_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__) # not used at the moment
def get_alarm(context, alarm_id):
    """Fetch the AlarmV1 with alarm_id or raise AlarmNotFound."""
    found = objects.AlarmV1.get_by_id(context, alarm_id)
    if found is not None:
        return found
    raise sol_ex.AlarmNotFound(alarm_id=alarm_id)
def get_alarms_all(context, marker=None):
    # All alarms; marker is the paging marker (see vnffm_alarm_page_size).
    return objects.AlarmV1.get_all(context, marker)
def get_not_cleared_alarms(context, inst_id):
    # Alarms of one VNF instance. NOTE(review): the filter is only by
    # managedObjectId -- confirm cleared alarms are excluded by the DB
    # layer or by the callers, as the name implies.
    return objects.AlarmV1.get_by_filter(context, managedObjectId=inst_id)
def alarm_href(alarm_id, endpoint):
    """Build the resource URI of an individual FM alarm."""
    return '{}/vnffm/v1/alarms/{}'.format(endpoint, alarm_id)
def make_alarm_links(alarm, endpoint):
    """Build the "_links" element of an Alarm representation."""
    links = objects.AlarmV1_Links()
    links.self = objects.Link(href=alarm_href(alarm.id, endpoint))
    # Link to the VNF instance the alarm is raised on.
    links.objectInstance = objects.Link(
        href=inst_utils.inst_href(alarm.managedObjectId, endpoint))
    return links
def make_alarm_notif_data(subsc, alarm, endpoint):
    """Build the FM notification payload for alarm toward subscription subsc.

    Returns an AlarmClearedNotification when alarmClearedTime is set on the
    alarm, otherwise an AlarmNotification embedding the whole alarm object.
    """
    if alarm.obj_attr_is_set('alarmClearedTime'):
        notif_data = objects.AlarmClearedNotificationV1(
            id=uuidutils.generate_uuid(),
            notificationType="AlarmClearedNotification",
            subscriptionId=subsc.id,
            timeStamp=datetime.utcnow(),
            alarmId=alarm.id,
            alarmClearedTime=alarm.alarmClearedTime,
            _links=objects.AlarmClearedNotificationV1_Links(
                alarm=objects.NotificationLink(
                    href=alarm_href(alarm.id, endpoint)),
                subscription=objects.NotificationLink(
                    href=subsc_utils.subsc_href(subsc.id, endpoint))
            )
        )
    else:
        notif_data = objects.AlarmNotificationV1(
            id=uuidutils.generate_uuid(),
            notificationType="AlarmNotification",
            subscriptionId=subsc.id,
            timeStamp=datetime.utcnow(),
            alarm=alarm,
            _links=objects.AlarmNotificationV1_Links(
                subscription=objects.NotificationLink(
                    href=subsc_utils.subsc_href(subsc.id, endpoint))
            )
        )
    return notif_data

View File

@ -0,0 +1,174 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from oslo_log import log as logging
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import subscription_utils as subsc_utils
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__)
CONF = config.CONF
TEST_NOTIFICATION_TIMEOUT = 20 # seconds
def get_subsc(context, subsc_id):
    """Fetch the FmSubscriptionV1 with subsc_id or raise FmSubscriptionNotFound."""
    found = objects.FmSubscriptionV1.get_by_id(context, subsc_id)
    if found is not None:
        return found
    raise sol_ex.FmSubscriptionNotFound(subsc_id=subsc_id)
def get_subsc_all(context, marker=None):
    # All FM subscriptions; marker is the paging marker.
    return objects.FmSubscriptionV1.get_all(context, marker)
def subsc_href(subsc_id, endpoint):
    """Build the resource URI of an FM subscription."""
    return '{}/vnffm/v1/subscriptions/{}'.format(endpoint, subsc_id)
def _get_notification_auth_handle(subsc):
    """Select the auth handle used to call subsc's callbackUri.

    No 'authentication' attribute -> unauthenticated; otherwise HTTP basic
    or OAuth2 client credentials, per SubscriptionAuthentication.
    """
    if not subsc.obj_attr_is_set('authentication'):
        return http_client.NoAuthHandle()
    if subsc.authentication.obj_attr_is_set('paramsBasic'):
        param = subsc.authentication.paramsBasic
        return http_client.BasicAuthHandle(param.userName, param.password)
    if subsc.authentication.obj_attr_is_set(
            'paramsOauth2ClientCredentials'):
        param = subsc.authentication.paramsOauth2ClientCredentials
        return http_client.OAuth2AuthHandle(None,
            param.tokenEndpoint, param.clientId, param.clientPassword)
    # not reach here (authTypes are validated at subscription creation);
    # NOTE(review): falls through to an implicit None return.
def async_call(func):
    """Decorator: run func on a daemon thread, fire-and-forget (no result)."""
    def inner(*args, **kwargs):
        worker = threading.Thread(
            target=func, args=args, kwargs=kwargs, daemon=True)
        worker.start()
    return inner
@async_call
def send_notification(subsc, notif_data):
    """POST notif_data to the subscriber's callbackUri (async, best effort).

    Runs on a daemon thread via async_call. Failures are logged and
    swallowed; there is no retry here.
    """
    auth_handle = _get_notification_auth_handle(subsc)
    client = http_client.HttpClient(auth_handle,
                                    version=api_version.CURRENT_FM_VERSION)
    url = subsc.callbackUri
    try:
        resp, _ = client.do_request(
            url, "POST", expected_status=[204], body=notif_data)
    except sol_ex.SolException:
        # it may occur if test_notification was not executed.
        LOG.exception("send_notification failed")
        # BUG FIX: 'resp' is unbound on this path; return instead of
        # falling through to the status check (NameError).
        return
    if resp.status_code != 204:
        LOG.error(f"send_notification failed: {resp.status_code}")
def test_notification(subsc):
    """GET the callbackUri to verify it is reachable (SOL013 test request).

    :raises TestNotificationFailed: on any error or non-204 response.
    """
    auth_handle = _get_notification_auth_handle(subsc)
    client = http_client.HttpClient(auth_handle,
                                    version=api_version.CURRENT_FM_VERSION,
                                    timeout=TEST_NOTIFICATION_TIMEOUT)
    url = subsc.callbackUri
    try:
        resp, _ = client.do_request(url, "GET", expected_status=[204])
    except sol_ex.SolException as e:
        # any sort of error is considered. avoid 500 error.
        raise sol_ex.TestNotificationFailed() from e
    if resp.status_code != 204:
        raise sol_ex.TestNotificationFailed()
def get_matched_subscs(context, inst, notif_type, alarm):
    """Return FM subscriptions whose filter matches (inst, notif_type, alarm).

    A subscription with no filter matches everything; otherwise every set
    filter attribute must match, and a filter attribute is skipped when the
    corresponding alarm attribute is absent/None.
    """
    subscs = []
    for subsc in get_subsc_all(context):
        # subsc: FmSubscription
        if not subsc.obj_attr_is_set('filter'):
            # no filter. get it.
            subscs.append(subsc)
            continue
        # subsc.filter: FmNotificationsFilter
        # - vnfInstanceSubscriptionFilter 0..1
        # - notificationTypes 0..N
        # - faultyResourceTypes 0..N
        # - perceivedSeverities 0..N
        # - eventTypes 0..N
        # - probableCauses 0..N
        if alarm.obj_attr_is_set('rootCauseFaultyResource'):
            alarm_faulty_res_type = (
                alarm.rootCauseFaultyResource.faultyResourceType)
        else:
            alarm_faulty_res_type = None
        if subsc.filter.obj_attr_is_set('vnfInstanceSubscriptionFilter'):
            inst_filter = subsc.filter.vnfInstanceSubscriptionFilter
            if not subsc_utils.match_inst_subsc_filter(inst_filter, inst):
                continue
        if subsc.filter.obj_attr_is_set('notificationTypes'):
            if notif_type not in subsc.filter.notificationTypes:
                continue
        if (alarm_faulty_res_type is not None and
                subsc.filter.obj_attr_is_set('faultyResourceTypes')):
            if alarm_faulty_res_type not in subsc.filter.faultyResourceTypes:
                continue
        # NOTE(review): perceivedSeverity/eventType/probableCause are read
        # directly -- assumes these alarm attributes are always set; confirm
        # against the Alarm object definition.
        if (alarm.perceivedSeverity is not None and
                subsc.filter.obj_attr_is_set('perceivedSeverities')):
            if alarm.perceivedSeverity not in subsc.filter.perceivedSeverities:
                continue
        if (alarm.eventType is not None and
                subsc.filter.obj_attr_is_set('eventTypes')):
            if alarm.eventType not in subsc.filter.eventTypes:
                continue
        if (alarm.probableCause is not None and
                subsc.filter.obj_attr_is_set('probableCauses')):
            if alarm.probableCause not in subsc.filter.probableCauses:
                continue
        # OK, matched
        subscs.append(subsc)
    return subscs
def get_alarm_subscs(context, alarm, inst):
    """Return subscriptions matching alarm (cleared vs. raised notification)."""
    if alarm.obj_attr_is_set('alarmClearedTime'):
        notif_type = 'AlarmClearedNotification'
    else:
        notif_type = 'AlarmNotification'
    return get_matched_subscs(context, inst, notif_type, alarm)

View File

@ -0,0 +1,70 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from importlib import import_module
# Registry of pluggable monitoring drivers: short name ->
# (module path, class name). Resolved lazily by get_class() so heavy
# modules are imported only when actually used.
module_and_class = {
    'stub':
        ('tacker.sol_refactored.common.monitoring_plugin_base',
         'MonitoringPluginStub'),
    'pm_event':
        ('tacker.sol_refactored.common.prometheus_plugin',
         'PrometheusPluginPm'),
    'alert':
        ('tacker.sol_refactored.common.prometheus_plugin',
         'PrometheusPluginFm'),
    # NOTE(review): key 'auto_healing' maps to the AutoScaling class --
    # confirm against callers whether this key name is intended.
    'auto_healing':
        ('tacker.sol_refactored.common.prometheus_plugin',
         'PrometheusPluginAutoScaling'),
}
def get_class(short_name):
    """Resolve a short plugin name to its class, importing on demand."""
    mod_name, cls_name = module_and_class[short_name]
    return getattr(import_module(mod_name), cls_name)
class MonitoringPlugin():
    """Abstract interface of an external-monitoring-tool plugin."""
    @staticmethod
    def get_instance(_class):
        # Indirection over the concrete class's singleton accessor.
        return _class.instance()
    def set_callback(self, notification_callback):
        # Register the handler receiving collected data; no-op by default.
        pass
    def create_job(self, **kwargs):
        # Set up monitoring for a PM job; no-op by default.
        pass
    def delete_job(self, **kwargs):
        # Tear down monitoring for a PM job; no-op by default.
        pass
    def alert(self, **kwargs):
        # Handle an incoming alert from the tool; no-op by default.
        pass
class MonitoringPluginStub(MonitoringPlugin):
    """No-op plugin used when the prometheus plugin feature is disabled."""
    _instance = None

    @staticmethod
    def instance():
        # Lazily create the singleton; __init__ registers it.
        if MonitoringPluginStub._instance is None:
            MonitoringPluginStub()
        return MonitoringPluginStub._instance

    def __init__(self):
        if MonitoringPluginStub._instance is not None:
            raise SystemError(
                "Not constructor but instance() should be used.")
        MonitoringPluginStub._instance = self

View File

@ -0,0 +1,175 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
from oslo_log import log as logging
from oslo_utils import uuidutils
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__)
CONF = config.CONF
TEST_NOTIFICATION_TIMEOUT = 20 # seconds
def update_report(context, job_id, report, timestamp):
    """Append a report reference to the PM job and return the updated job.

    NOTE: the returned pm_job is not persisted here; the caller saves it.
    """
    # update reports in the pmJob
    pm_job = get_pm_job(context, job_id)
    job_report = _gen_job_report(job_id, report, timestamp)
    if pm_job.obj_attr_is_set('reports'):
        pm_job.reports.append(job_report)
    else:
        # First report of this job.
        pm_job.reports = [job_report]
    return pm_job
def _gen_job_report(job_id, report, timestamp):
    # Build a report reference for PmJob.reports. Only report.id is used;
    # the href is API-relative (no endpoint prefix).
    return objects.VnfPmJobV2_Reports(
        href=f'/vnfpm/v2/pm_jobs/{job_id}/reports/{report.id}',
        readyTime=timestamp
    )
def get_pm_job_all(context, marker=None):
    # get all pm-job; marker is the paging marker (vnfpm_pmjob_page_size).
    return objects.PmJobV2.get_all(context, marker)
def get_pm_job(context, pm_job_id):
    """Fetch the PmJobV2 with pm_job_id or raise PMJobNotExist."""
    found = objects.PmJobV2.get_by_id(context, pm_job_id)
    if found is not None:
        return found
    raise sol_ex.PMJobNotExist()
def get_pm_report(context, pm_job_id, report_id=None):
    """Get performance report(s) of a PM job.

    With report_id: return that single report or raise PMReportNotExist.
    Without report_id: return the list of the job's reports (may be empty).
    Note the return type differs between the two modes.
    """
    if report_id:
        pm_report = objects.PerformanceReportV2.get_by_filter(
            context, id=report_id, jobId=pm_job_id)
        if not pm_report:
            raise sol_ex.PMReportNotExist()
        return pm_report[0]
    pm_reports = objects.PerformanceReportV2.get_by_filter(
        context, jobId=pm_job_id)
    return pm_reports
def pm_job_href(pm_job_id, endpoint):
    """Build the resource URI of a PM job."""
    return '{}/vnfpm/v2/pm_jobs/{}'.format(endpoint, pm_job_id)
def make_pm_job_links(pm_job, endpoint):
    """Build the "_links" element of a PmJob representation."""
    links = objects.VnfPmJobV2_Links()
    links.self = objects.Link(href=pm_job_href(pm_job.id, endpoint))
    # One link per monitored VNF instance.
    links_objects = []
    for objects_id in pm_job.objectInstanceIds:
        links_objects.append(objects.Link(
            href=inst_utils.inst_href(objects_id, endpoint)))
    links.objects = links_objects
    return links
def _get_notification_auth_handle(pm_job):
    """Select the auth handle used to call pm_job's callbackUri.

    No 'authentication' attribute -> unauthenticated; otherwise HTTP basic
    or OAuth2 client credentials. Returns None when an authentication
    object is present but carries none of the supported parameter sets.
    """
    if not pm_job.obj_attr_is_set('authentication'):
        return http_client.NoAuthHandle()
    if pm_job.authentication.obj_attr_is_set('paramsBasic'):
        param = pm_job.authentication.paramsBasic
        return http_client.BasicAuthHandle(param.userName, param.password)
    if pm_job.authentication.obj_attr_is_set(
            'paramsOauth2ClientCredentials'):
        param = pm_job.authentication.paramsOauth2ClientCredentials
        return http_client.OAuth2AuthHandle(
            None, param.tokenEndpoint, param.clientId, param.clientPassword)
    return None
def test_notification(pm_job):
    """GET the callbackUri to verify it is reachable (SOL013 test request).

    :raises TestNotificationFailed: on any error or non-204 response.
    """
    auth_handle = _get_notification_auth_handle(pm_job)
    client = http_client.HttpClient(auth_handle,
                                    version=api_version.CURRENT_PM_VERSION,
                                    timeout=TEST_NOTIFICATION_TIMEOUT)
    url = pm_job.callbackUri
    try:
        resp, _ = client.do_request(url, "GET", expected_status=[204])
    except sol_ex.SolException as e:
        # any sort of error is considered. avoid 500 error.
        raise sol_ex.TestNotificationFailed() from e
    if resp.status_code != 204:
        raise sol_ex.TestNotificationFailed()
def make_pm_notif_data(instance_id, sub_instance_ids, report_id,
                       pm_job, timestamp, endpoint):
    """Build a PerformanceInformationAvailableNotification payload.

    Links point at the VNF instance, the PM job, and the newly available
    report. subObjectInstanceIds is set only when sub_instance_ids is
    non-empty.
    """
    notif_data = objects.PerformanceInformationAvailableNotificationV2(
        id=uuidutils.generate_uuid(),
        notificationType="PerformanceInformationAvailableNotification",
        timeStamp=timestamp,
        pmJobId=pm_job.id,
        objectType=pm_job.objectType,
        objectInstanceId=instance_id,
        _links=objects.PerformanceInformationAvailableNotificationV2_Links(
            objectInstance=objects.NotificationLink(
                href=inst_utils.inst_href(instance_id, endpoint)),
            pmJob=objects.NotificationLink(
                href=pm_job_href(pm_job.id, endpoint)),
            performanceReport=objects.NotificationLink(
                href=f"{endpoint}/vnfpm/v2/pm_jobs/{pm_job.id}/"
                     f"reports/{report_id}"
            )
        )
    )
    if sub_instance_ids:
        notif_data.subObjectInstanceIds = sub_instance_ids
    return notif_data
def async_call(func):
    """Decorator: run func on a daemon thread, fire-and-forget (no result)."""
    def inner(*args, **kwargs):
        worker = threading.Thread(
            target=func, args=args, kwargs=kwargs, daemon=True)
        worker.start()
    return inner
@async_call
def send_notification(pm_job, notif_data):
    """POST notif_data to the PM job's callbackUri (async, best effort).

    Runs on a daemon thread via async_call. Failures are logged and
    swallowed; there is no retry here.
    """
    auth_handle = _get_notification_auth_handle(pm_job)
    client = http_client.HttpClient(auth_handle,
                                    version=api_version.CURRENT_PM_VERSION)
    url = pm_job.callbackUri
    try:
        resp, _ = client.do_request(
            url, "POST", expected_status=[204], body=notif_data)
    except sol_ex.SolException:
        # it may occur if test_notification was not executed.
        LOG.exception("send_notification failed")
        # BUG FIX: 'resp' is unbound on this path; return instead of
        # falling through to the status check (NameError).
        return
    if resp.status_code != 204:
        LOG.error(f'send_notification failed: {resp.status_code}')

View File

@ -0,0 +1,869 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import json
import os
import paramiko
import re
import tempfile
from oslo_log import log as logging
from oslo_utils import uuidutils
from tacker.sol_refactored.api import prometheus_plugin_validator as validator
from tacker.sol_refactored.api.schemas import prometheus_plugin_schemas
from tacker.sol_refactored.common import config as cfg
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import monitoring_plugin_base as mon_base
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.conductor import conductor_rpc_v2 as rpc
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class PrometheusPlugin():
    """Common base for the Prometheus external-monitoring-tool plugins."""

    def __init__(self):
        # RPC client used to forward collected data to the conductor.
        self.rpc = rpc.PrometheusPluginConductor()

    def parse_datetime(self, isodate):
        """Return an aware datetime from an ISO-8601 string or datetime."""
        if isinstance(isodate, datetime.datetime):
            parsed = isodate
        else:
            # fromisoformat() cannot parse a trailing 'Z' (UTC) designator.
            parsed = datetime.datetime.fromisoformat(
                isodate.replace('Z', '+00:00'))
        # Naive timestamps are interpreted in the local timezone.
        return parsed if parsed.tzinfo else parsed.astimezone()
class PrometheusPluginPm(PrometheusPlugin, mon_base.MonitoringPlugin):
    # Singleton; access via instance(), not the constructor.
    _instance = None
    @staticmethod
    def instance():
        # When PM is disabled in [prometheus_plugin], the no-op stub is
        # cached in _instance so later calls stay cheap.
        if PrometheusPluginPm._instance is None:
            if not CONF.prometheus_plugin.performance_management:
                stub = mon_base.MonitoringPluginStub.instance()
                PrometheusPluginPm._instance = stub
            else:
                # The constructor registers itself in _instance.
                PrometheusPluginPm()
        return PrometheusPluginPm._instance
def __init__(self):
if PrometheusPluginPm._instance:
raise SystemError(
"Not constructor but instance() should be used.")
super(PrometheusPluginPm, self).__init__()
self.notification_callback = None
auth_handle = http_client.NoAuthHandle()
self.client = http_client.HttpClient(auth_handle)
self.reporting_period_margin = (
CONF.prometheus_plugin.reporting_period_margin)
self.notification_callback = self.default_callback
self.sol_exp_map = {
'VCpuUsageMeanVnf':
'avg(sum(rate(pod_cpu_usage_seconds_total'
'{{pod=~"{pod}"}}[{reporting_period}s]))',
'VCpuUsagePeakVnf':
'max(sum(rate(pod_cpu_usage_seconds_total'
'{{pod=~"{pod}"}}[{reporting_period}s]))',
'VMemoryUsageMeanVnf':
'avg(pod_memory_working_set_bytes{{pod=~"{pod}"}} / '
'on(pod) (kube_node_status_capacity{{resource="memory"}} * '
'on(node) group_right kube_pod_info))',
'VMemoryUsagePeakVnf':
'max(pod_memory_working_set_bytes{{pod=~"{pod}"}} / '
'on(pod) (kube_node_status_capacity{{resource="memory"}} * '
'on(node) group_right kube_pod_info))',
'VDiskUsageMeanVnf':
'avg(container_fs_usage_bytes{{container="{container}",'
'pod=~"{pod}"}}/container_fs_limit_bytes{{container='
'"{container}",pod=~"{pod}"}}))',
'VDiskUsagePeakVnf':
'max(container_fs_usage_bytes{{container="{container}",'
'pod=~"{pod}"}}/container_fs_limit_bytes{{container='
'"{container}",pod=~"{pod}"}}))',
'ByteIncomingVnfIntCp':
'sum(container_network_receive_bytes_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'PacketIncomingVnfIntCp':
'sum(container_network_receive_packets_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'ByteOutgoingVnfIntCp':
'sum(container_network_transmit_bytes_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'PacketOutgoingVnfIntCp':
'sum(container_network_transmit_packets_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'ByteIncomingVnfExtCp':
'sum(container_network_receive_bytes_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'PacketIncomingVnfExtCp':
'sum(container_network_receive_packets_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'ByteOutgoingVnfExtCp':
'sum(container_network_transmit_bytes_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
'PacketOutgoingVnfExtCp':
'sum(container_network_transmit_packets_total'
'{{interface="{sub_object_instance_id}",pod=~"{pod}"}})',
}
PrometheusPluginPm._instance = self
    def set_callback(self, notification_callback):
        # Replace the default handler used to forward collected PM entries.
        self.notification_callback = notification_callback
    def create_job(self, **kwargs):
        # Delegates to make_rules() with kwargs['context'] / kwargs['pm_job'].
        return self.make_rules(kwargs['context'], kwargs['pm_job'])
    def delete_job(self, **kwargs):
        # Delegates to delete_rules() with kwargs['context'] / kwargs['pm_job'].
        self.delete_rules(kwargs['context'], kwargs['pm_job'])
def alert(self, **kwargs):
try:
self._alert(kwargs['request'], body=kwargs['body'])
except Exception as e:
# All exceptions is ignored here and 204 response will always
# be returned. Because when tacker responds error to alertmanager,
# alertmanager may repeat the same reports.
LOG.error("%s: %s", e.__class__.__name__, e.args[0])
    def default_callback(self, context, entries):
        # Hand the collected PM entries to the conductor over RPC.
        self.rpc.store_job_info(context, entries)
def convert_measurement_unit(self, metric, value):
if re.match(r'^V(Cpu|Memory|Disk)Usage(Mean|Peak)Vnf\..+', metric):
value = float(value)
elif re.match(r'^(Byte|Packet)(Incoming|Outgoing)Vnf(IntCp|ExtCp)',
metric):
value = int(value)
else:
raise sol_ex.PrometheusPluginError(
"Failed to convert annotations.value to measurement unit.")
return value
def get_datetime_of_latest_report(
self, context, pm_job, object_instance_id,
sub_object_instance_id, metric):
report = pm_job_utils.get_pm_report(context, pm_job.id)
if not report:
return None
entries_of_same_object = list(
filter(
lambda x: (
x.objectInstanceId == object_instance_id and
(not x.obj_attr_is_set('subObjectInstanceId') or
x.subObjectInstanceId == sub_object_instance_id) and
x.performanceMetric == metric),
report.entries))
if len(entries_of_same_object) == 0:
return None
values = sum(list(map(
lambda x: x.performanceValues, entries_of_same_object)), [])
return max(values, key=lambda value:
self.parse_datetime(value.timeStamp)).timeStamp
    def filter_alert_by_time(
            self, context, pm_job, datetime_now,
            object_instance_id, sub_object_instance_id, metric):
        """Raise PrometheusPluginSkipped for expired or too-frequent alerts."""
        # Ignore expired alert
        reporting_boundary = pm_job.criteria.reportingBoundary\
            if (pm_job.criteria.obj_attr_is_set('reportingBoundary') and
                pm_job.criteria.reportingBoundary) else None
        if (reporting_boundary and
                datetime_now > self.parse_datetime(reporting_boundary)):
            raise sol_ex.PrometheusPluginSkipped()
        # Ignore short period alert
        # NOTE(review): assumes the stored timeStamp is a datetime -- confirm.
        report_date = self.get_datetime_of_latest_report(
            context, pm_job, object_instance_id, sub_object_instance_id,
            metric)
        # reporting_period_margin is some margin for timing inconsistency
        # between prometheus and tacker.
        if (report_date and report_date + datetime.timedelta(
                seconds=(pm_job.criteria.reportingPeriod -
                         self.reporting_period_margin)) >= datetime_now):
            raise sol_ex.PrometheusPluginSkipped()
    def valid_alert(self, pm_job, object_instance_id, sub_object_instance_id):
        """Check alert labels against the PM job's (sub)object instance ids.

        Raises PrometheusPluginSkipped (after logging) when the ids in the
        alert labels are not covered by the job.
        """
        object_instance_ids = (
            pm_job.objectInstanceIds
            if (pm_job.obj_attr_is_set('objectInstanceIds') and
                pm_job.objectInstanceIds) else [])
        if object_instance_id not in object_instance_ids:
            LOG.error(
                f"labels.object_instance_id {object_instance_id} "
                f"doesn't match pmJob.")
            raise sol_ex.PrometheusPluginSkipped()
        sub_object_instance_ids = (
            pm_job.subObjectInstanceIds
            if (pm_job.obj_attr_is_set('subObjectInstanceIds') and
                pm_job.subObjectInstanceIds) else [])
        # A sub-object id is only valid when the job declares sub-objects
        # and contains that id.
        if (sub_object_instance_id and
                (not sub_object_instance_ids or
                 sub_object_instance_id not in sub_object_instance_ids)):
            LOG.error(
                f"labels.sub_object_instance_id {sub_object_instance_id} "
                f"doesn't match pmJob.")
            raise sol_ex.PrometheusPluginSkipped()
    @validator.schema(prometheus_plugin_schemas.AlertMessage)
    def _alert(self, request, body):
        """Convert validated alertmanager PM alerts into report entries.

        Skipped alerts (wrong time window, id not in job) are ignored;
        other exceptions (e.g. PMJobNotExist) propagate to alert(), which
        logs them. When any entry is produced, the whole batch is handed to
        notification_callback.
        """
        result = []
        context = request.context
        datetime_now = datetime.datetime.now(datetime.timezone.utc)
        for alt in body['alerts']:
            # Only alerts tagged for the PM function are processed here.
            if alt['labels']['function_type'] != 'vnfpm':
                continue
            try:
                pm_job_id = alt['labels']['job_id']
                object_instance_id = alt['labels']['object_instance_id']
                metric = alt['labels']['metric']
                sub_object_instance_id = alt['labels'].get(
                    'sub_object_instance_id')
                value = alt['annotations']['value']
                pm_job = pm_job_utils.get_pm_job(context, pm_job_id)
                self.filter_alert_by_time(context, pm_job, datetime_now,
                                          object_instance_id,
                                          sub_object_instance_id, metric)
                self.valid_alert(
                    pm_job, object_instance_id, sub_object_instance_id)
                value = self.convert_measurement_unit(metric, value)
                result.append({
                    'objectType': pm_job.objectType,
                    'objectInstanceId': object_instance_id,
                    'subObjectInstanceId': sub_object_instance_id,
                    'performanceMetric': metric,
                    'performanceValues': [{
                        'timeStamp': datetime_now,
                        'value': value
                    }]
                })
            except sol_ex.PrometheusPluginSkipped:
                pass
        if len(result) > 0:
            if self.notification_callback:
                # every job_id in body['alerts'] has same id
                self.notification_callback(context, {
                    'id': uuidutils.generate_uuid(),
                    'jobId': pm_job_id,
                    'entries': result,
                })
        return result
    def decompose_metrics_vnfc(self, pm_job):
        """Expand the job's criteria into concrete per-object metric names.

        Keeps explicit performanceMetric entries whose '.suffix' is one of
        the job's objectInstanceIds, then expands each
        'VirtualizedComputeResource' metric group into the six usage
        metrics for every object. Result is deduplicated.

        :raises PrometheusPluginError: when no metric can be derived.
        """
        metrics = pm_job.criteria.performanceMetric\
            if pm_job.criteria.obj_attr_is_set('performanceMetric') else None
        metrics = (list(filter(lambda x: (
            re.match(r'^V(Cpu|Memory|Disk)Usage(Mean|Peak)Vnf\..+', x) and
            re.sub(r'^V(Cpu|Memory|Disk)Usage(Mean|Peak)Vnf\.', '',
                   x) in pm_job.objectInstanceIds), metrics))
            if metrics else [])
        metric_grps = pm_job.criteria.performanceMetricGroup\
            if (pm_job.criteria.obj_attr_is_set('performanceMetricGroup') and
                pm_job.criteria.performanceMetricGroup) else []
        for obj in pm_job.objectInstanceIds:
            for grp in metric_grps:
                if grp == 'VirtualizedComputeResource':
                    metrics.append(f'VCpuUsageMeanVnf.{obj}')
                    metrics.append(f'VCpuUsagePeakVnf.{obj}')
                    metrics.append(f'VMemoryUsageMeanVnf.{obj}')
                    metrics.append(f'VMemoryUsagePeakVnf.{obj}')
                    metrics.append(f'VDiskUsageMeanVnf.{obj}')
                    metrics.append(f'VDiskUsagePeakVnf.{obj}')
        metrics = list(set(metrics))
        if len(metrics) == 0:
            raise sol_ex.PrometheusPluginError(
                "Invalid performanceMetric or performanceMetricGroup."
            )
        return metrics
def make_prom_ql(self, target, pod, container='', collection_period=30,
reporting_period=60, sub_object_instance_id='*'):
reporting_period = max(reporting_period, 30)
expr = self.sol_exp_map[target].format(
pod=pod,
container=container,
collection_period=collection_period,
reporting_period=reporting_period,
sub_object_instance_id=sub_object_instance_id
)
return expr
    def make_rule(self, pm_job, object_instance_id, sub_object_instance_id,
                  metric, expression, collection_period):
        """Build one Prometheus alerting-rule dict for a PM job series.

        Labels carry the routing keys read back by _alert(); the 'value'
        annotation lets alertmanager report the sampled value.
        """
        labels = {
            'alertname': '',
            'receiver_type': 'tacker',
            'function_type': 'vnfpm',
            'job_id': pm_job.id,
            'object_instance_id': object_instance_id,
            'sub_object_instance_id': sub_object_instance_id,
            'metric': metric
        }
        # Drop unset labels (e.g. no sub-object).
        labels = {k: v for k, v in labels.items() if v is not None}
        annotations = {
            'value': r'{{$value}}'
        }
        rule = {
            # Unique rule name; routing relies on labels, not the name.
            'alert': uuidutils.generate_uuid(),
            'expr': expression,
            'for': f'{collection_period}s',
            'labels': labels,
            'annotations': annotations
        }
        return rule
def get_vnfc_resource_info(self, _, vnf_instance_id, inst_map):
inst = inst_map[vnf_instance_id]
if not inst.obj_attr_is_set('instantiatedVnfInfo') or\
not inst.instantiatedVnfInfo.obj_attr_is_set(
'vnfcResourceInfo'):
return None
return inst.instantiatedVnfInfo.vnfcResourceInfo
def get_pod_regexp(self, resource_info):
# resource ids are like:
# ['test-test1-756757f8f-xcwmt',
# 'test-test2-756757f8f-kmghr', ...]
# convert them to a regex string such as:
# '(test\-test1\-[0-9a-f]{1,10}-[0-9a-z]{5}$|
# test\-test2\-[0-9a-f]{1,10}-[0-9a-z]{5}$|...)'
deployments = list(filter(
lambda r:
r.computeResource.obj_attr_is_set(
'vimLevelResourceType')
and r.computeResource.obj_attr_is_set(
'resourceId'
)
and r.computeResource.vimLevelResourceType ==
'Deployment', resource_info
))
deployments = list(set(list(map(
lambda d: re.escape(re.sub(
r'\-[0-9a-f]{1,10}-[0-9a-z]{5}$', '',
d.computeResource.resourceId)) +
r'\-[0-9a-f]{1,10}-[0-9a-z]{5}$',
deployments
))))
pods_regexp = '(' + '|'.join(deployments) + ')'
return deployments, pods_regexp
    def _make_rules_for_each_obj(self, context, pm_job, inst_map, metric):
        """Build one rule per object instance for a Vnf-level metric.

        Instances without Deployment resources are silently skipped.
        """
        # 'VCpuUsageMeanVnf.<obj>' -> 'VCpuUsageMeanVnf' (template key).
        target = re.sub(r'\..+$', '', metric)
        objs = pm_job.objectInstanceIds
        collection_period = pm_job.criteria.collectionPeriod
        reporting_period = pm_job.criteria.reportingPeriod
        rules = []
        for obj in objs:
            # resource ids are like:
            # ['test-test1-756757f8f-xcwmt',
            #  'test-test2-756757f8f-kmghr', ...]
            # convert them to a regex string such as:
            # '(test\-test1\-[0-9a-f]{1,10}-[0-9a-z]{5}$|
            #   test\-test2\-[0-9a-f]{1,10}-[0-9a-z]{5}$|...)'
            resource_info = self.get_vnfc_resource_info(context, obj, inst_map)
            if not resource_info:
                continue
            deployments, pods_regexp = self.get_pod_regexp(resource_info)
            if len(deployments) == 0:
                continue
            expr = self.make_prom_ql(
                target, pods_regexp, collection_period=collection_period,
                reporting_period=reporting_period)
            rules.append(self.make_rule(
                pm_job, obj, None, metric, expr,
                collection_period))
        return rules
    # NOTE(review): method name keeps the original spelling ("resouce");
    # renaming would break callers.
    def get_compute_resouce_by_sub_obj(self, vnf_instance, sub_obj):
        """Return the Deployment computeResource backing vnfcInfo id sub_obj.

        Follows vnfcInfo(id == sub_obj).vnfcResourceInfoId into
        vnfcResourceInfo; returns None when anything along the chain is
        missing or the resource is not a Deployment.
        """
        inst = vnf_instance
        if (not inst.obj_attr_is_set('instantiatedVnfInfo') or
                not inst.instantiatedVnfInfo.obj_attr_is_set(
                    'vnfcResourceInfo') or
                not inst.instantiatedVnfInfo.obj_attr_is_set('vnfcInfo')):
            return None
        vnfc_info = list(filter(
            lambda x: (x.obj_attr_is_set('vnfcResourceInfoId') and
                       x.id == sub_obj),
            inst.instantiatedVnfInfo.vnfcInfo))
        if len(vnfc_info) == 0:
            return None
        resources = list(filter(
            lambda x: (vnfc_info[0].obj_attr_is_set('vnfcResourceInfoId') and
                       x.id == vnfc_info[0].vnfcResourceInfoId and
                       x.computeResource.obj_attr_is_set('vimLevelResourceType') and
                       x.computeResource.vimLevelResourceType == 'Deployment' and
                       x.computeResource.obj_attr_is_set('resourceId')),
            inst.instantiatedVnfInfo.vnfcResourceInfo))
        if len(resources) == 0:
            return None
        return resources[0].computeResource
def _make_rules_for_each_sub_obj(self, context, pm_job, inst_map, metric):
    """Build one Prometheus alert rule per subObjectInstanceId.

    For 'Vnf'/'Vnfc' jobs the rule targets the pod/container resolved
    from each sub object; for CP jobs a pod regexp over all deployments
    of the (single) object instance is used together with the sub
    object id.
    """
    # The part of the metric before the first '.' selects the PromQL
    # template used by make_prom_ql.
    target = re.sub(r'\..+$', '', metric)
    objs = pm_job.objectInstanceIds
    sub_objs = pm_job.subObjectInstanceIds\
        if (pm_job.obj_attr_is_set('subObjectInstanceIds') and
            pm_job.subObjectInstanceIds) else []
    collection_period = pm_job.criteria.collectionPeriod
    reporting_period = pm_job.criteria.reportingPeriod
    rules = []
    # Cardinality is 1:N when sub objects are present, so only objs[0]
    # is consulted here.
    resource_info = self.get_vnfc_resource_info(context, objs[0], inst_map)
    if not resource_info:
        return []
    if pm_job.objectType in {'Vnf', 'Vnfc'}:
        inst = inst_map[objs[0]]
        for sub_obj in sub_objs:
            # resource id is like 'test-test1-756757f8f-xcwmt'
            # obtain 'test-test1' as deployment
            # obtain 'test' as container
            compute_resource = self.get_compute_resouce_by_sub_obj(
                inst, sub_obj)
            if not compute_resource:
                continue
            resource_id = compute_resource.resourceId
            deployment = re.sub(
                r'\-[0-9a-f]{1,10}-[0-9a-z]{5}$', '', resource_id)
            # NOTE(review): this assumes the deployment name repeats the
            # container name (e.g. 'test-test1' -> container 'test');
            # confirm that naming convention always holds.
            g = re.match(r'^(.+)\-\1{1,}[0-9]+', deployment)
            if not g:
                continue
            container = g.group(1)
            resource_id = re.escape(resource_id)
            expr = self.make_prom_ql(
                target, resource_id, container=container,
                collection_period=collection_period,
                reporting_period=reporting_period)
            rules.append(self.make_rule(
                pm_job, objs[0], sub_obj, metric, expr,
                collection_period))
    else:
        deployments, pods_regexp = self.get_pod_regexp(resource_info)
        if len(deployments) == 0:
            return []
        for sub_obj in sub_objs:
            expr = self.make_prom_ql(
                target, pods_regexp, collection_period=collection_period,
                reporting_period=reporting_period,
                sub_object_instance_id=sub_obj)
            rules.append(self.make_rule(
                pm_job, objs[0], sub_obj, metric, expr,
                collection_period))
    return rules
def _make_rules(self, context, pm_job, metric, inst_map):
sub_objs = pm_job.subObjectInstanceIds\
if (pm_job.obj_attr_is_set('subObjectInstanceIds') and
pm_job.subObjectInstanceIds) else []
# Cardinality of objectInstanceIds and subObjectInstanceIds
# is N:0 or 1:N.
if len(sub_objs) > 0:
return self._make_rules_for_each_sub_obj(
context, pm_job, inst_map, metric)
return self._make_rules_for_each_obj(
context, pm_job, inst_map, metric)
def decompose_metrics_vnfintextcp(self, pm_job):
    """Expand the job's metric spec into concrete CP metric names.

    Keeps explicitly listed metrics matching the
    (Byte|Packet)(Incoming|Outgoing)<objectType> pattern and expands
    the matching metric group into all four such metrics.

    :raises: sol_ex.PrometheusPluginError when nothing remains.
    """
    obj_type = pm_job.objectType
    group_name = ('VnfInternalCp' if obj_type == 'VnfIntCp'
                  else 'VnfExternalCp')
    criteria = pm_job.criteria
    listed = (criteria.performanceMetric
              if criteria.obj_attr_is_set('performanceMetric') else None)
    pattern = re.compile(r'^(Byte|Packet)(Incoming|Outgoing)' + obj_type)
    metrics = [m for m in listed if pattern.match(m)] if listed else []
    metric_grps = (criteria.performanceMetricGroup
                   if (criteria.obj_attr_is_set('performanceMetricGroup')
                       and criteria.performanceMetricGroup) else [])
    if group_name in metric_grps:
        # The group stands for all four direction/unit combinations.
        metrics.extend(
            f'{unit}{direction}{obj_type}'
            for unit in ('Byte', 'Packet')
            for direction in ('Incoming', 'Outgoing'))
    metrics = list(set(metrics))
    if not metrics:
        raise sol_ex.PrometheusPluginError(
            "Invalid performanceMetric or performanceMetricGroup."
        )
    return metrics
def _delete_rule(self, host, port, user, password, path, pm_job_id):
    """Remove the alert rule file of a PM job from a Prometheus server.

    Connects over SSH and deletes ``<path>/<pm_job_id>.json`` via SFTP.
    """
    rule_file = f'{path}/{pm_job_id}.json'
    with paramiko.Transport(sock=(host, port)) as conn:
        conn.connect(username=user, password=password)
        paramiko.SFTPClient.from_transport(conn).remove(rule_file)
def delete_rules(self, context, pm_job):
    """Delete the job's rule file from every target, then reload servers."""
    targets, reload_uris = self.get_access_info(pm_job)
    for tgt in targets:
        self._delete_rule(tgt['host'], tgt['port'], tgt['user'],
                          tgt['password'], tgt['path'], pm_job.id)
    for uri in reload_uris:
        self.reload_prom_server(context, uri)
def decompose_metrics(self, pm_job):
    """Expand a PM job's metric spec according to its objectType.

    :raises: sol_ex.PrometheusPluginError for an unsupported objectType.
    """
    obj_type = pm_job.objectType
    if obj_type in ('Vnf', 'Vnfc'):
        return self.decompose_metrics_vnfc(pm_job)
    if obj_type in ('VnfIntCp', 'VnfExtCp'):
        return self.decompose_metrics_vnfintextcp(pm_job)
    raise sol_ex.PrometheusPluginError(
        f"Invalid objectType: {obj_type}.")
def reload_prom_server(self, context, reload_uri):
    """Ask a Prometheus server to reload its configuration."""
    resp, _ = self.client.do_request(
        reload_uri, "PUT", context=context)
    status = resp.status_code
    # Prometheus answers 202 (Accepted) on a successful reload request;
    # anything else is logged but not raised.
    if status != 202:
        LOG.error("reloading request to prometheus is failed: %d.", status)
def _upload_rule(self, rule_group, host, port, user, password, path,
                 pm_job_id):
    """Serialize a rule group to JSON and SFTP it to a Prometheus server.

    The file is written to a temporary directory first and uploaded as
    ``<path>/<pm_job_id>.json``.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        local_file = os.path.join(tmpdir, 'rule.json')
        with open(local_file, 'w+', encoding="utf-8") as fp:
            json.dump(rule_group, fp, indent=4, ensure_ascii=False)
        with paramiko.Transport(sock=(host, port)) as conn:
            LOG.info("Upload rule files to prometheus server: %s.", host)
            conn.connect(username=user, password=password)
            sftp = paramiko.SFTPClient.from_transport(conn)
            sftp.put(local_file, f'{path}/{pm_job_id}.json')
def get_access_info(self, pm_job):
    """Extract Prometheus server access info from a PM job's metadata.

    :return: (target_list, reload_list) where target_list holds SSH
        access dicts and reload_list the de-duplicated reload endpoints.
    :raises: sol_ex.PrometheusPluginError when the 'monitoring' metadata
        is absent or not describing an external prometheus.
    """
    if (not pm_job.obj_attr_is_set('metadata')
            or 'monitoring' not in pm_job.metadata):
        raise sol_ex.PrometheusPluginError(
            "monitoring info is missing at metadata field.")
    access_info = pm_job.metadata['monitoring']
    if (access_info.get('monitorName') != 'prometheus' or
            access_info.get('driverType') != 'external'):
        raise sol_ex.PrometheusPluginError(
            "prometheus info is missing at metadata field.")
    target_list = []
    reload_list = []
    for info in access_info.get('targetsInfo', []):
        auth = info.get('authInfo', {})
        entry = {
            'host': info.get('prometheusHost', ''),
            'port': info.get('prometheusHostPort', 22),
            'user': auth.get('ssh_username', ''),
            'password': auth.get('ssh_password', ''),
            'path': info.get('alertRuleConfigPath', '')
        }
        uri = info.get('prometheusReloadApiEndpoint', '')
        # Skip incomplete entries; an empty password is still accepted.
        if not (entry['host'] and entry['user'] and entry['path'] and uri):
            continue
        target_list.append(entry)
        reload_list.append(uri)
    return target_list, list(set(reload_list))
def upload_rules(
        self, context, target_list, reload_list, rule_group, pm_job):
    """Push a rule group to every target, then trigger config reloads."""
    for tgt in target_list:
        self._upload_rule(
            rule_group, tgt['host'], tgt['port'], tgt['user'],
            tgt['password'], tgt['path'], pm_job.id)
    for uri in reload_list:
        self.reload_prom_server(context, uri)
def get_vnf_instances(self, context, pm_job):
    """Map each distinct objectInstanceId of the job to its VnfInstance."""
    return {
        inst_id: inst_utils.get_inst(context, inst_id)
        for inst_id in set(pm_job.objectInstanceIds)
    }
def make_rules(self, context, pm_job):
    """Convert a PM job into a Prometheus rule group and upload it.

    :return: the uploaded rule group dict
    :raises: sol_ex.PrometheusPluginError when no alert rule could be
        derived from the job's metrics.
    """
    target_list, reload_list = self.get_access_info(pm_job)
    metrics = self.decompose_metrics(pm_job)
    inst_map = self.get_vnf_instances(context, pm_job)
    # Flatten the per-metric rule lists into one list.
    rules = sum([self._make_rules(context, pm_job, metric, inst_map)
                 for metric in metrics], [])
    if len(rules) == 0:
        raise sol_ex.PrometheusPluginError(
            f"Converting from a PM job to alert rules is failed."
            f" PM job id: {pm_job.id}")
    rule_group = {
        'groups': [
            {
                # The group name embeds the job id so delete_rules can
                # remove the matching file later.
                'name': f'tacker_{pm_job.id}',
                'rules': rules
            }
        ]
    }
    self.upload_rules(
        context, target_list, reload_list, rule_group, pm_job)
    return rule_group
class PrometheusPluginFm(PrometheusPlugin, mon_base.MonitoringPlugin):
    """Fault management part of the Prometheus plugin.

    Receives alertmanager notifications, converts 'vnffm' entries into
    AlarmV1 objects and hands them to the notification callback (by
    default an RPC cast to the conductor). Singleton; obtain it via
    instance().
    """
    _instance = None

    @staticmethod
    def instance():
        """Return the singleton, or a stub when fault_management is off."""
        if PrometheusPluginFm._instance is None:
            if not CONF.prometheus_plugin.fault_management:
                stub = mon_base.MonitoringPluginStub.instance()
                PrometheusPluginFm._instance = stub
            else:
                PrometheusPluginFm()
        return PrometheusPluginFm._instance

    def __init__(self):
        if PrometheusPluginFm._instance:
            raise SystemError(
                "Not constructor but instance() should be used.")
        super(PrometheusPluginFm, self).__init__()
        self.notification_callback = self.default_callback
        self.endpoint = CONF.v2_vnfm.endpoint
        PrometheusPluginFm._instance = self

    def set_callback(self, notification_callback):
        """Replace the callback invoked for each created/updated alarm."""
        self.notification_callback = notification_callback

    def alert(self, **kwargs):
        """Entry point for an alertmanager POST; never raises."""
        try:
            self._alert(kwargs['request'], body=kwargs['body'])
        except Exception as e:
            # All exceptions is ignored here and 204 response will always
            # be returned. Because when tacker responds error to alertmanager,
            # alertmanager may repeat the same reports.
            # NOTE: log the exception itself rather than e.args[0]; an
            # exception raised without arguments would make e.args[0]
            # fail with IndexError inside this handler.
            LOG.error("%s: %s", e.__class__.__name__, e)

    def default_callback(self, context, alarm):
        """Default notification callback: cast the alarm to the conductor."""
        self.rpc.store_alarm_info(context, alarm)

    def vnfc_instance_ids(
            self, context, vnf_instance_id, alert_entry):
        """Return vnfcInfo ids whose resource matches the alert 'pod' label.

        The 'pod' label is treated as a regex and matched against the
        resourceId of Deployment/Pod typed vnfcResourceInfo entries.
        """
        inst = inst_utils.get_inst(context, vnf_instance_id)
        resources = (inst.instantiatedVnfInfo.vnfcResourceInfo
                     if inst.obj_attr_is_set('instantiatedVnfInfo') and
                     inst.instantiatedVnfInfo.obj_attr_is_set(
                         'vnfcResourceInfo') else [])
        vnfc_info = (inst.instantiatedVnfInfo.vnfcInfo
                     if inst.obj_attr_is_set('instantiatedVnfInfo') and
                     inst.instantiatedVnfInfo.obj_attr_is_set(
                         'vnfcInfo') else [])
        if 'pod' not in alert_entry['labels']:
            return []
        pod = alert_entry['labels']['pod']
        deployments = list(filter(
            lambda r: (
                r.computeResource.obj_attr_is_set('vimLevelResourceType') and
                r.computeResource.obj_attr_is_set('resourceId') and
                (r.computeResource.vimLevelResourceType in
                 {'Deployment', 'Pod'}) and
                re.match(pod, r.computeResource.resourceId)),
            resources
        ))
        vnfc_res_info_ids = list(map(
            lambda res: res.id, deployments
        ))
        vnfc_info = list(filter(
            lambda info: (
                info.obj_attr_is_set('vnfcResourceInfoId') and
                info.vnfcResourceInfoId in vnfc_res_info_ids),
            vnfc_info
        ))
        vnfc_info = list(map(lambda info: info.id, vnfc_info))
        return vnfc_info

    def update_alarm(self, context, not_cleared, ends_at, datetime_now):
        """Mark each alarm cleared and notify for it."""
        for alm in not_cleared:
            alm.alarmClearedTime = ends_at
            alm.alarmChangedTime = datetime_now
            if self.notification_callback:
                self.notification_callback(context, alm)

    def create_new_alarm(self, context, alert_entry, datetime_now):
        """Build an AlarmV1 from an alert entry, notify and return it.

        :raises: sol_ex.PrometheusPluginSkipped when no vnfc instance
            could be resolved for the alert.
        """
        vnf_instance_id = alert_entry['labels']['vnf_instance_id']
        fingerprint = alert_entry['fingerprint']
        perceived_severity = alert_entry['labels']['perceived_severity']
        # The fingerprint is stored in faultDetails so that later alerts
        # with the same fingerprint find this alarm (see
        # get_not_cleared_alarms).
        fault_details = [
            f"fingerprint: {fingerprint}",
            f"detail: {alert_entry['annotations'].get('fault_details')}"
        ]
        vnfc_instance_ids = self.vnfc_instance_ids(
            context, vnf_instance_id, alert_entry)
        if len(vnfc_instance_ids) == 0:
            LOG.error("failed to specify vnfc_instance for the alert.")
            raise sol_ex.PrometheusPluginSkipped()
        new_alarm = objects.AlarmV1.from_dict({
            'id':
                uuidutils.generate_uuid(),
            'managedObjectId':
                vnf_instance_id,
            'vnfcInstanceIds':
                vnfc_instance_ids,
            'alarmRaisedTime':
                datetime_now.isoformat(),
            'ackState':
                'UNACKNOWLEDGED',
            'perceivedSeverity':
                perceived_severity,
            'eventTime':
                alert_entry['startsAt'],
            'eventType':
                alert_entry['labels'].get('event_type', ''),
            'faultType':
                alert_entry['annotations'].get('fault_type', ''),
            'probableCause':
                alert_entry['annotations'].get('probable_cause', ''),
            'isRootCause':
                False,
            'faultDetails':
                fault_details,
            '_links': {}
        })
        _links = fm_alarm_utils.make_alarm_links(new_alarm, self.endpoint)
        new_alarm._links = _links
        if self.notification_callback:
            self.notification_callback(context, new_alarm)
        return new_alarm

    def get_not_cleared_alarms(self, context, vnf_instance_id, fingerprint):
        """Return not-cleared alarms whose faultDetails hold fingerprint."""
        alms = fm_alarm_utils.get_not_cleared_alarms(context, vnf_instance_id)
        fpstr = f'fingerprint: {fingerprint}'
        return list(filter(
            lambda x: (not x.obj_attr_is_set('alarmClearedTime') and
                       x.obj_attr_is_set('faultDetails') and
                       fpstr in x.faultDetails), alms))

    def create_or_update_alarm(
            self, context, alert_entry, datetime_now):
        """Clear or create alarms for one alert entry.

        'resolved' alerts clear matching open alarms; 'firing' alerts
        create a new alarm unless one is already open.

        :raises: sol_ex.PrometheusPluginSkipped when nothing to do.
        """
        status = alert_entry['status']
        vnf_instance_id = alert_entry['labels']['vnf_instance_id']
        fingerprint = alert_entry['fingerprint']
        not_cleared = self.get_not_cleared_alarms(
            context, vnf_instance_id, fingerprint)
        if status == 'resolved' and len(not_cleared) > 0:
            ends_at = alert_entry['endsAt']
            self.update_alarm(
                context, not_cleared, ends_at, datetime_now)
            return not_cleared
        if status == 'firing' and len(not_cleared) == 0:
            new_alarm = self.create_new_alarm(
                context, alert_entry, datetime_now)
            return [new_alarm]
        raise sol_ex.PrometheusPluginSkipped()

    @validator.schema(prometheus_plugin_schemas.AlertMessage)
    def _alert(self, request, body):
        """Validate and process all 'vnffm' alert entries in the body."""
        now = datetime.datetime.now(datetime.timezone.utc)
        result = []
        for alt in body['alerts']:
            # Entries for other plugin functions are ignored here.
            if alt['labels']['function_type'] != 'vnffm':
                continue
            try:
                alarms = self.create_or_update_alarm(
                    request.context, alt, now)
                result.extend(alarms)
            except sol_ex.PrometheusPluginSkipped:
                pass
        return result
class PrometheusPluginAutoScaling(PrometheusPlugin, mon_base.MonitoringPlugin):
    """Auto-scaling part of the Prometheus plugin.

    Converts 'auto_scale' alertmanager entries into scaling parameters
    passed to the notification callback (by default an RPC cast to the
    conductor). Singleton; obtain it via instance().
    """
    _instance = None

    @staticmethod
    def instance():
        """Return the singleton, or a stub when auto_scaling is off."""
        if PrometheusPluginAutoScaling._instance is None:
            if not CONF.prometheus_plugin.auto_scaling:
                stub = mon_base.MonitoringPluginStub.instance()
                PrometheusPluginAutoScaling._instance = stub
            else:
                PrometheusPluginAutoScaling()
        return PrometheusPluginAutoScaling._instance

    def __init__(self):
        if PrometheusPluginAutoScaling._instance:
            raise SystemError(
                "Not constructor but instance() should be used.")
        super(PrometheusPluginAutoScaling, self).__init__()
        self.notification_callback = self.default_callback
        PrometheusPluginAutoScaling._instance = self

    def set_callback(self, notification_callback):
        """Replace the callback invoked for each scaling request."""
        self.notification_callback = notification_callback

    def alert(self, **kwargs):
        """Entry point for an alertmanager POST; never raises."""
        try:
            self._alert(kwargs['request'], body=kwargs['body'])
        except Exception as e:
            # All exceptions is ignored here and 204 response will always
            # be returned. Because when tacker responds error to alertmanager,
            # alertmanager may repeat the same reports.
            # NOTE: log the exception itself rather than e.args[0]; an
            # exception raised without arguments would make e.args[0]
            # fail with IndexError inside this handler.
            LOG.error("%s: %s", e.__class__.__name__, e)

    def default_callback(self, context, vnf_instance_id, scaling_param):
        """Default callback: cast a scale request to the conductor."""
        self.rpc.request_scale(context, vnf_instance_id, scaling_param)

    def skip_if_auto_scale_not_enabled(self, vnf_instance):
        """Raise PrometheusPluginSkipped unless isAutoscaleEnabled is set."""
        if (not vnf_instance.obj_attr_is_set('vnfConfigurableProperties') or
                not vnf_instance.vnfConfigurableProperties.get(
                    'isAutoscaleEnabled')):
            raise sol_ex.PrometheusPluginSkipped()

    def process_auto_scale(self, request, vnf_instance_id, auto_scale_type,
                           aspect_id):
        """Forward one scaling request to the notification callback."""
        scaling_param = {
            'type': auto_scale_type,
            'aspectId': aspect_id,
        }
        context = request.context
        if self.notification_callback:
            self.notification_callback(context, vnf_instance_id, scaling_param)

    @validator.schema(prometheus_plugin_schemas.AlertMessage)
    def _alert(self, request, body):
        """Validate and process all 'auto_scale' alert entries."""
        result = []
        for alt in body['alerts']:
            # Entries for other plugin functions are ignored here.
            if alt['labels']['function_type'] != 'auto_scale':
                continue
            try:
                vnf_instance_id = alt['labels']['vnf_instance_id']
                auto_scale_type = alt['labels']['auto_scale_type']
                aspect_id = alt['labels']['aspect_id']
                context = request.context
                inst = inst_utils.get_inst(context, vnf_instance_id)
                # Instances that did not opt in are skipped silently.
                self.skip_if_auto_scale_not_enabled(inst)
                self.process_auto_scale(
                    request, vnf_instance_id, auto_scale_type, aspect_id)
                result.append((vnf_instance_id, auto_scale_type, aspect_id))
            except sol_ex.PrometheusPluginSkipped:
                pass
        return result

View File

@ -84,13 +84,15 @@ def send_notification(subsc, notif_data):
url = subsc.callbackUri
try:
resp, body = client.do_request(url, "POST", body=notif_data)
if resp.status_code != 204:
LOG.error("send_notification failed: %d" % resp.status_code)
except Exception:
resp, body = client.do_request(
url, "POST", expected_status=[204], body=notif_data)
except sol_ex.SolException:
# it may occur if test_notification was not executed.
LOG.exception("send_notification failed")
if resp.status_code != 204:
LOG.error("send_notification failed: %d" % resp.status_code)
def test_notification(subsc):
auth_handle = _get_notification_auth_handle(subsc)
@ -100,11 +102,12 @@ def test_notification(subsc):
url = subsc.callbackUri
try:
resp, _ = client.do_request(url, "GET")
if resp.status_code != 204:
raise sol_ex.TestNotificationFailed()
except Exception:
resp, _ = client.do_request(url, "GET", expected_status=[204])
except sol_ex.SolException as e:
# any sort of error is considered. avoid 500 error.
raise sol_ex.TestNotificationFailed() from e
if resp.status_code != 204:
raise sol_ex.TestNotificationFailed()

View File

@ -50,3 +50,31 @@ class VnfLcmRpcApiV2(object):
def modify_vnfinfo(self, context, lcmocc_id):
self._cast_lcm_op(context, lcmocc_id, 'modify_vnfinfo')
TOPIC_PROMETHEUS_PLUGIN = 'TACKER_PROMETHEUS_PLUGIN'
class PrometheusPluginConductor(object):
    """RPC client casting prometheus-plugin events to the conductor."""

    target = oslo_messaging.Target(
        exchange='tacker',
        topic=TOPIC_PROMETHEUS_PLUGIN,
        fanout=False,
        version='1.0')

    def cast(self, context, method, **kwargs):
        """Fire-and-forget cast of ``method`` to the conductor topic."""
        serializer = objects_base.TackerObjectSerializer()
        rpc_client = rpc.get_client(
            self.target, version_cap=None, serializer=serializer)
        rpc_client.prepare().cast(context, method, **kwargs)

    def store_alarm_info(self, context, alarm):
        """Ask the conductor to persist and notify an FM alarm."""
        self.cast(context, 'store_alarm_info', alarm=alarm)

    def store_job_info(self, context, report):
        """Ask the conductor to persist and notify a PM report."""
        self.cast(context, 'store_job_info', report=report)

    def request_scale(self, context, id, scale_req):
        """Ask the conductor to issue a scale request for a VNF instance."""
        self.cast(context, 'request_scale', id=id, scale_req=scale_req)

View File

@ -22,7 +22,10 @@ from tacker.sol_refactored.common import coordinate
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.conductor import prometheus_plugin_driver as pp_drv
from tacker.sol_refactored.conductor import vnffm_driver_v1
from tacker.sol_refactored.conductor import vnflcm_driver_v2
from tacker.sol_refactored.conductor import vnfpm_driver_v2
from tacker.sol_refactored.nfvo import nfvo_client
from tacker.sol_refactored import objects
from tacker.sol_refactored.objects.v2 import fields
@ -37,8 +40,11 @@ class ConductorV2(object):
def __init__(self):
self.vnflcm_driver = vnflcm_driver_v2.VnfLcmDriverV2()
self.vnffm_driver = vnffm_driver_v1.VnfFmDriverV1()
self.vnfpm_driver = vnfpm_driver_v2.VnfPmDriverV2()
self.endpoint = CONF.v2_vnfm.endpoint
self.nfvo_client = nfvo_client.NfvoClient()
self.prom_driver = pp_drv.PrometheusPluginDriver.instance()
self._change_lcm_op_state()
@ -313,3 +319,14 @@ class ConductorV2(object):
# send notification COMPLETED or FAILED_TEMP
self.nfvo_client.send_lcmocc_notification(context, lcmocc, inst,
self.endpoint)
def store_alarm_info(self, context, alarm):
    """Persist an FM alarm and notify it via the FM driver."""
    self.vnffm_driver.store_alarm_info(context, alarm)
def store_job_info(self, context, report):
    """Persist a PM report and notify it via the PM driver."""
    # call pm_driver
    self.vnfpm_driver.store_job_info(context, report)
@log.log
def request_scale(self, context, id, scale_req):
    """Forward an auto-scale request to the prometheus plugin driver."""
    self.prom_driver.request_scale(context, id, scale_req)

View File

@ -0,0 +1,63 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.common import config as cfg
from tacker.sol_refactored.common import http_client
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class PrometheusPluginDriverStub():
    """No-op driver used when all prometheus_plugin features are disabled."""

    def request_scale(self, context, vnf_instance_id, scale_req):
        # Intentionally does nothing: scaling via the plugin is disabled.
        pass
class PrometheusPluginDriver():
    """Driver relaying prometheus-plugin requests to the Tacker VNFM API.

    Singleton; obtain it via instance(), which returns a no-op stub
    when none of the prometheus_plugin features is enabled.
    """
    _instance = None

    @staticmethod
    def instance():
        """Return the singleton driver, or a stub when all features are off."""
        if PrometheusPluginDriver._instance is None:
            if (CONF.prometheus_plugin.auto_scaling or
                    CONF.prometheus_plugin.fault_management or
                    CONF.prometheus_plugin.performance_management):
                PrometheusPluginDriver()
            else:
                stub = PrometheusPluginDriverStub()
                PrometheusPluginDriver._instance = stub
        return PrometheusPluginDriver._instance

    def __init__(self):
        if PrometheusPluginDriver._instance:
            raise SystemError("Not constructor but instance() should be used.")
        # Keystone password auth used when calling the Tacker VNFM API.
        auth_handle = http_client.KeystonePasswordAuthHandle(
            auth_url=CONF.keystone_authtoken.auth_url,
            username=CONF.keystone_authtoken.username,
            password=CONF.keystone_authtoken.password,
            project_name=CONF.keystone_authtoken.project_name,
            user_domain_name=CONF.keystone_authtoken.user_domain_name,
            project_domain_name=CONF.keystone_authtoken.project_domain_name)
        self.client = http_client.HttpClient(auth_handle)
        PrometheusPluginDriver._instance = self

    def request_scale(self, context, vnf_instance_id, scale_req):
        """POST a scale request to the v2 VNF LCM API for the instance."""
        ep = CONF.v2_vnfm.endpoint
        url = f'{ep}/vnflcm/v2/vnf_instances/{vnf_instance_id}/scale'
        resp, _ = self.client.do_request(
            url, "POST", context=context, body=scale_req, version="2.0.0")
        # This method issues a scale (not heal) request, so log it as
        # AutoScaling; the previous "AutoHealing" text was misleading.
        LOG.info("AutoScaling request is processed: %d.", resp.status_code)

View File

@ -27,3 +27,6 @@ class ConductorV2Hook(object):
service.conn.create_consumer(
conductor_rpc_v2.TOPIC_CONDUCTOR_V2, endpoints,
serializer=serializer)
service.conn.create_consumer(
conductor_rpc_v2.TOPIC_PROMETHEUS_PLUGIN, endpoints,
serializer=serializer)

View File

@ -0,0 +1,50 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.nfvo import nfvo_client
LOG = logging.getLogger(__name__)
CONF = config.CONF
class VnfFmDriverV1():
    """Persists FM alarms and forwards them as notifications to the NFVO."""

    def __init__(self):
        self.endpoint = CONF.v2_vnfm.endpoint
        self.nfvo_client = nfvo_client.NfvoClient()

    def store_alarm_info(self, context, alarm):
        """Create or update the alarm in the DB, then send a notification."""
        # store alarm into DB
        try:
            # get_alarm raises AlarmNotFound when this id is not stored yet;
            # in that case the alarm is created instead of updated.
            alarm_utils.get_alarm(context, alarm.id)
            with context.session.begin(subtransactions=True):
                alarm.update(context)
        except sol_ex.AlarmNotFound:
            with context.session.begin(subtransactions=True):
                alarm.create(context)

        # get inst
        inst = inst_utils.get_inst(context, alarm.managedObjectId)

        # send notification
        self.nfvo_client.send_alarm_notification(
            context, alarm, inst, self.endpoint)

View File

@ -0,0 +1,58 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.nfvo import nfvo_client
from tacker.sol_refactored import objects
CONF = config.CONF
class VnfPmDriverV2():
    """Stores PM reports and notifies the NFVO that a report is available."""

    def __init__(self):
        self.endpoint = CONF.v2_vnfm.endpoint
        self.nfvo_client = nfvo_client.NfvoClient()

    def store_job_info(self, context, report):
        """Persist a performance report, update its PM job and notify."""
        # store report into db
        report = self._store_report(context, report)

        # update job reports
        job_id = report.jobId
        # NOTE(review): assumes entries[0].performanceValues[0] exists;
        # an empty report would raise IndexError here - confirm callers
        # never pass one.
        timestamp = report.entries[0].performanceValues[0].timeStamp
        pm_job = self._update_job_reports(
            context, job_id, report, timestamp)

        # Send a notify pm job request to the NFVO client.
        # POST /{pmjob.callbackUri}
        self.nfvo_client.send_pm_job_notification(
            report, pm_job, timestamp, self.endpoint)

    def _store_report(self, context, report):
        # Convert the report dict into an object and persist it.
        report = objects.PerformanceReportV2.from_dict(report)
        report.create(context)
        return report

    def _update_job_reports(self, context, job_id, report, timestamp):
        # update reports in the pmJob
        update_job = pm_job_utils.update_report(
            context, job_id, report, timestamp)
        with context.session.begin(subtransactions=True):
            update_job.update(context)
        return update_job

View File

@ -0,0 +1,57 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.api import prometheus_plugin_wsgi as prom_wsgi
from tacker.sol_refactored.common import config as cfg
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import monitoring_plugin_base as mon_base
CONF = cfg.CONF
class PmEventController(prom_wsgi.PrometheusPluginAPIController):
    """Receives PM event reports posted by the external monitoring tool."""

    def pm_event(self, request, body):
        """Handle a PM event POST; returns 204 with no body."""
        if not CONF.prometheus_plugin.performance_management:
            raise sol_ex.PrometheusPluginNotEnabled(
                name='Performance management')
        plugin = mon_base.MonitoringPlugin.get_instance(
            mon_base.get_class('pm_event'))
        plugin.alert(request=request, body=body)
        return prom_wsgi.PrometheusPluginResponse(204, None)
class FmAlertController(prom_wsgi.PrometheusPluginAPIController):
    """Receives FM alerts posted by alertmanager."""

    def alert(self, request, body):
        """Handle an FM alert POST; returns 204 with no body."""
        if not CONF.prometheus_plugin.fault_management:
            raise sol_ex.PrometheusPluginNotEnabled(
                name='Fault management')
        plugin = mon_base.MonitoringPlugin.get_instance(
            mon_base.get_class('alert'))
        plugin.alert(request=request, body=body)
        return prom_wsgi.PrometheusPluginResponse(204, None)
class AutoScalingController(prom_wsgi.PrometheusPluginAPIController):
    """Receives auto-scaling alerts posted by alertmanager."""

    def auto_scaling(self, request, body):
        """Handle an auto-scaling alert POST; returns 204 with no body."""
        if not CONF.prometheus_plugin.auto_scaling:
            raise sol_ex.PrometheusPluginNotEnabled(
                name='Auto scaling')
        # NOTE(review): the lookup key is 'auto_healing' although this
        # controller handles auto scaling - confirm it matches the key
        # registered in monitoring_plugin_base.
        cls = mon_base.get_class('auto_healing')
        mon_base.MonitoringPlugin.get_instance(cls).alert(
            request=request, body=body)
        return prom_wsgi.PrometheusPluginResponse(204, None)

    def auto_scaling_id(self, request, _, body):
        # Variant for the URL carrying an instance id; the id from the
        # path is ignored and handling is delegated to auto_scaling.
        return self.auto_scaling(request, body)

View File

@ -0,0 +1,200 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_utils import uuidutils
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.api.schemas import vnffm_v1 as schema
from tacker.sol_refactored.api import validator
from tacker.sol_refactored.api import wsgi as sol_wsgi
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import coordinate
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils
from tacker.sol_refactored.common import fm_subscription_utils as subsc_utils
from tacker.sol_refactored.controller import vnffm_view
from tacker.sol_refactored.nfvo import nfvo_client
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__) # NOTE: unused at the moment
CONF = config.CONF
class VnfFmControllerV1(sol_wsgi.SolAPIController):
    """VNF Fault Management (v1) API controller.

    Serves alarms (index/show/update) and FM subscriptions
    (create/list/show/delete).
    """

    def __init__(self):
        self.nfvo_client = nfvo_client.NfvoClient()
        self.endpoint = CONF.v2_vnfm.endpoint
        self._fm_view = vnffm_view.AlarmViewBuilder(self.endpoint)
        self._subsc_view = vnffm_view.FmSubscriptionViewBuilder(self.endpoint)

    def supported_api_versions(self, action):
        # All FM actions share the same supported version set.
        return api_version.v1_fm_versions

    def allowed_content_types(self, action):
        if action == 'update':
            # Content-Type of Modify request shall be
            # 'application/mergepatch+json' according to SOL spec.
            # But 'application/json' and 'text/plain' is OK for backward
            # compatibility.
            return ['application/mergepatch+json', 'application/json',
                    'text/plain']
        return ['application/json', 'text/plain']

    def index(self, request):
        """List alarms, honoring an attribute filter and paging."""
        filter_param = request.GET.get('filter')
        if filter_param is not None:
            filters = self._fm_view.parse_filter(filter_param)
        else:
            filters = None
        page_size = CONF.v2_vnfm.vnffm_alarm_page_size
        pager = self._fm_view.parse_pager(request, page_size)
        alarms = fm_alarm_utils.get_alarms_all(request.context,
                                               marker=pager.marker)
        resp_body = self._fm_view.detail_list(alarms, filters, None, pager)
        return sol_wsgi.SolResponse(
            200, resp_body, version=api_version.CURRENT_FM_VERSION,
            link=pager.get_link())

    def show(self, request, id):
        """Show a single alarm by id."""
        alarm = fm_alarm_utils.get_alarm(request.context, id)
        resp_body = self._fm_view.detail(alarm)
        return sol_wsgi.SolResponse(200, resp_body,
                                    version=api_version.CURRENT_FM_VERSION)

    @validator.schema(schema.AlarmModifications_V130, '1.3.0')
    @coordinate.lock_resources('{id}')
    def update(self, request, id, body):
        """Change an alarm's ackState; a no-op transition is rejected."""
        context = request.context
        alarm = fm_alarm_utils.get_alarm(context, id)
        ack_state = body['ackState']
        # Re-setting the current ackState is an error.
        if alarm.ackState == ack_state:
            raise sol_ex.AckStateInvalid()
        alarm.ackState = ack_state
        with context.session.begin(subtransactions=True):
            alarm.update(context)
        return sol_wsgi.SolResponse(200, body,
                                    version=api_version.CURRENT_FM_VERSION)

    @validator.schema(schema.FmSubscriptionRequest_V130, '1.3.0')
    def subscription_create(self, request, body):
        """Create an FM subscription, optionally testing the callback URI."""
        context = request.context
        subsc = objects.FmSubscriptionV1(
            id=uuidutils.generate_uuid(),
            callbackUri=body['callbackUri']
        )
        if body.get('filter'):
            subsc.filter = (
                objects.FmNotificationsFilterV1.from_dict(
                    body['filter'])
            )
        auth_req = body.get('authentication')
        if auth_req:
            auth = objects.SubscriptionAuthentication(
                authType=auth_req['authType']
            )
            if 'BASIC' in auth.authType:
                basic_req = auth_req.get('paramsBasic')
                if basic_req is None:
                    msg = "ParamsBasic must be specified."
                    raise sol_ex.InvalidSubscription(sol_detail=msg)
                auth.paramsBasic = (
                    objects.SubscriptionAuthentication_ParamsBasic(
                        userName=basic_req.get('userName'),
                        password=basic_req.get('password')
                    )
                )
            if 'OAUTH2_CLIENT_CREDENTIALS' in auth.authType:
                oauth2_req = auth_req.get('paramsOauth2ClientCredentials')
                if oauth2_req is None:
                    msg = "paramsOauth2ClientCredentials must be specified."
                    raise sol_ex.InvalidSubscription(sol_detail=msg)
                auth.paramsOauth2ClientCredentials = (
                    objects.SubscriptionAuthentication_ParamsOauth2(
                        clientId=oauth2_req.get('clientId'),
                        clientPassword=oauth2_req.get('clientPassword'),
                        tokenEndpoint=oauth2_req.get('tokenEndpoint')
                    )
                )
            if 'TLS_CERT' in auth.authType:
                msg = "'TLS_CERT' is not supported at the moment."
                raise sol_ex.InvalidSubscription(sol_detail=msg)
            subsc.authentication = auth

        # Optionally probe the callback URI before persisting.
        if CONF.v2_nfvo.test_callback_uri:
            subsc_utils.test_notification(subsc)

        subsc.create(context)

        resp_body = self._subsc_view.detail(subsc)
        self_href = subsc_utils.subsc_href(subsc.id, self.endpoint)

        return sol_wsgi.SolResponse(
            201, resp_body, version=api_version.CURRENT_FM_VERSION,
            location=self_href)

    def subscription_list(self, request):
        """List FM subscriptions with filter and paging."""
        filter_param = request.GET.get('filter')
        if filter_param is not None:
            filters = self._subsc_view.parse_filter(filter_param)
        else:
            filters = None
        page_size = CONF.v2_vnfm.subscription_page_size
        pager = self._subsc_view.parse_pager(request, page_size)
        subscs = subsc_utils.get_subsc_all(request.context,
                                           marker=pager.marker)
        resp_body = self._subsc_view.detail_list(subscs, filters, None, pager)
        return sol_wsgi.SolResponse(
            200, resp_body, version=api_version.CURRENT_FM_VERSION,
            link=pager.get_link())

    def subscription_show(self, request, id):
        """Show a single FM subscription by id."""
        subsc = subsc_utils.get_subsc(request.context, id)
        resp_body = self._subsc_view.detail(subsc)
        return sol_wsgi.SolResponse(200, resp_body,
                                    version=api_version.CURRENT_FM_VERSION)

    def subscription_delete(self, request, id):
        """Delete an FM subscription by id."""
        context = request.context
        subsc = subsc_utils.get_subsc(request.context, id)
        subsc.delete(context)
        return sol_wsgi.SolResponse(204, None,
                                    version=api_version.CURRENT_FM_VERSION)

View File

@ -0,0 +1,65 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored.common import fm_subscription_utils as subsc_utils
from tacker.sol_refactored.controller import vnflcm_view as base_view
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__)
CONF = config.CONF
class AlarmViewBuilder(base_view.BaseViewBuilder):
    """Builds REST representations of Alarm objects."""

    _EXCLUDE_DEFAULT = []

    def __init__(self, endpoint):
        self.endpoint = endpoint

    def detail(self, alarm, selector=None):
        """Return the alarm as a dict, optionally attribute-filtered."""
        # NOTE: _links is not saved in DB. create when it is necessary.
        if not alarm.obj_attr_is_set('_links'):
            alarm._links = alarm_utils.make_alarm_links(alarm, self.endpoint)
        body = alarm.to_dict()
        return body if selector is None else selector.filter(alarm, body)
class FmSubscriptionViewBuilder(base_view.BaseViewBuilder):
    """Builds REST representations of FmSubscription objects."""

    def __init__(self, endpoint):
        self.endpoint = endpoint

    def detail(self, subsc, selector=None):
        """Return the subscription as a dict, without credentials."""
        # NOTE: _links is not saved in DB. create when it is necessary.
        if not subsc.obj_attr_is_set('_links'):
            self_href = subsc_utils.subsc_href(subsc.id, self.endpoint)
            subsc._links = objects.FmSubscriptionV1_Links()
            subsc._links.self = objects.Link(href=self_href)
        body = subsc.to_dict()
        # NOTE: authentication is not included in FmSubscriptionV1
        body.pop('authentication', None)
        return body if selector is None else selector.filter(subsc, body)

View File

@ -0,0 +1,297 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
from oslo_log import log as logging
from oslo_utils import uuidutils
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.api.schemas import vnfpm_v2 as schema
from tacker.sol_refactored.api import validator
from tacker.sol_refactored.api import wsgi as sol_wsgi
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import coordinate
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import monitoring_plugin_base as plugin
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.controller import vnfpm_view
from tacker.sol_refactored.nfvo import nfvo_client
from tacker.sol_refactored import objects
LOG = logging.getLogger(__name__) # NOTE: unused at the moment
CONF = config.CONF
# Mapping from PM job objectType to its only valid
# performanceMetricGroup value (ETSI NFV-SOL 003 v3.3.1, clause 6.5.2.7).
OBJ_TYPE_TO_GROUP_TYPE = {
    'Vnf': 'VirtualisedComputeResource',
    'Vnfc': 'VirtualisedComputeResource',
    'VnfIntCP': 'VnfInternalCP',
    'VnfExtCP': 'VnfExternalCP'
}

# Mapping from PM job objectType to the set of valid performanceMetric
# base names. A requested metric may carry a '.<subcounter>' suffix,
# which is stripped before the membership check below.
OBJ_TYPE_TO_METRIC_LIST = {
    'Vnf': {'VCpuUsageMeanVnf', 'VCpuUsagePeakVnf',
            'VMemoryUsageMeanVnf', 'VMemoryUsagePeakVnf',
            'VDiskUsageMeanVnf', 'VDiskUsagePeakVnf'},
    'Vnfc': {'VCpuUsageMeanVnf', 'VCpuUsagePeakVnf',
             'VMemoryUsageMeanVnf', 'VMemoryUsagePeakVnf',
             'VDiskUsageMeanVnf', 'VDiskUsagePeakVnf'},
    'VnfIntCP': {'ByteIncomingVnfIntCp', 'ByteOutgoingVnfIntCp',
                 'PacketIncomingVnfIntCp', 'PacketOutgoingVnfIntCp'},
    'VnfExtCP': {'ByteIncomingVnfExtCp', 'ByteOutgoingVnfExtCp',
                 'PacketIncomingVnfExtCp', 'PacketOutgoingVnfExtCp'}
}

# Backward-compatible alias for the original (misspelled) public name.
OBJ_TYPE_TO_METRIC_LISt = OBJ_TYPE_TO_METRIC_LIST


def _check_http_client_auth(auth_req):
    """Build a SubscriptionAuthentication object from a request dict.

    :param auth_req: the 'authentication' attribute of the request body
    :returns: objects.SubscriptionAuthentication
    :raises sol_ex.InvalidSubscription: if mandatory parameters for the
        selected authType are missing, or the authType is unsupported
        ('TLS_CERT').
    """
    auth = objects.SubscriptionAuthentication(
        authType=auth_req['authType']
    )
    if 'BASIC' in auth.authType:
        basic_req = auth_req.get('paramsBasic')
        if basic_req is None:
            msg = "ParamsBasic must be specified."
            raise sol_ex.InvalidSubscription(sol_detail=msg)
        auth.paramsBasic = (
            objects.SubscriptionAuthentication_ParamsBasic(
                userName=basic_req.get('userName'),
                password=basic_req.get('password')
            )
        )

    if 'OAUTH2_CLIENT_CREDENTIALS' in auth.authType:
        oauth2_req = auth_req.get('paramsOauth2ClientCredentials')
        if oauth2_req is None:
            msg = "paramsOauth2ClientCredentials must be specified."
            raise sol_ex.InvalidSubscription(sol_detail=msg)
        auth.paramsOauth2ClientCredentials = (
            objects.SubscriptionAuthentication_ParamsOauth2(
                clientId=oauth2_req.get('clientId'),
                clientPassword=oauth2_req.get('clientPassword'),
                tokenEndpoint=oauth2_req.get('tokenEndpoint')
            )
        )

    if 'TLS_CERT' in auth.authType:
        msg = "'TLS_CERT' is not supported at the moment."
        raise sol_ex.InvalidSubscription(sol_detail=msg)

    return auth


def _check_performance_metric_or_group(
        obj_type, metric_group, performance_metric):
    """Validate metric group / metric names against the objectType.

    :raises sol_ex.PMJobInvalidRequest: if the group does not match the
        objectType, or any metric base name is unknown for it.
    """
    # Check whether the object_type is consistent with the corresponding
    # group name
    if metric_group and (
            len(metric_group) != 1 or
            metric_group[0] != OBJ_TYPE_TO_GROUP_TYPE[obj_type]):
        raise sol_ex.PMJobInvalidRequest

    # Check if the type in performance metric matches the standard type.
    if performance_metric:
        metric_types = {metric.split('.')[0] for metric in performance_metric}
        if not metric_types.issubset(OBJ_TYPE_TO_METRIC_LIST[obj_type]):
            raise sol_ex.PMJobInvalidRequest
class VnfPmControllerV2(sol_wsgi.SolAPIController):
    """SOL003 Performance Management v2 API controller (PM jobs/reports)."""

    def __init__(self):
        self.nfvo_client = nfvo_client.NfvoClient()
        self.endpoint = CONF.v2_vnfm.endpoint
        self._pm_job_view = vnfpm_view.PmJobViewBuilder(self.endpoint)
        # External monitoring tool driver registered for 'pm_event'
        # (e.g. the Prometheus plugin).
        cls = plugin.get_class('pm_event')
        self.plugin = plugin.MonitoringPlugin.get_instance(cls)

    @validator.schema(schema.CreatePmJobRequest_V210, '2.1.0')
    def create(self, request, body):
        """Create a PM job (POST /pm_jobs).

        Validates the request body, checks the target VNF instances,
        configures the monitoring plugin and persists the PmJobV2.
        Returns 201 with a Location header pointing at the new job.
        """
        context = request.context

        # check request body
        # If this `subObjectInstanceIds` is present, the cardinality of the
        # `objectInstanceIds` attribute shall be 1.
        if (body.get("subObjectInstanceIds") and
                len(body.get("objectInstanceIds")) > 1):
            raise sol_ex.PMJobInvalidRequest

        # At least one of the two attributes (performance
        # metric or group) shall be present.
        metric_group = body["criteria"].get('performanceMetricGroup')
        performance_metric = body["criteria"].get('performanceMetric')
        if not metric_group and not performance_metric:
            raise sol_ex.PMJobInvalidRequest

        # check the value of group or performance_metric
        _check_performance_metric_or_group(
            body['objectType'], metric_group, performance_metric)

        # check vnf instance status: every target must be INSTANTIATED
        inst_ids = body["objectInstanceIds"]
        for inst_id in inst_ids:
            inst = inst_utils.get_inst(context, inst_id)
            if inst.instantiationState == 'NOT_INSTANTIATED':
                raise sol_ex.VnfInstanceIsNotInstantiated(inst_id=inst_id)

        # pm_job.criteria
        pm_job_criteria = objects.VnfPmJobCriteriaV2(
            collectionPeriod=body["criteria"]['collectionPeriod'],
            reportingPeriod=body["criteria"]['reportingPeriod']
        )
        criteria = body["criteria"]
        if performance_metric:
            pm_job_criteria.performanceMetric = criteria['performanceMetric']
        if metric_group:
            pm_job_criteria.performanceMetricGroup = criteria[
                'performanceMetricGroup']
        if criteria.get('reportingBoundary'):
            # Accept an RFC3339 'Z' suffix, which fromisoformat() cannot
            # parse directly; only validity is checked, the original string
            # is stored unchanged.
            try:
                dt = copy.deepcopy(criteria['reportingBoundary'])
                datetime.datetime.fromisoformat(dt.replace('Z', '+00:00'))
            except ValueError as ex:
                raise sol_ex.SolValidationError(
                    detail="invalid date format.") from ex
            pm_job_criteria.reportingBoundary = criteria['reportingBoundary']

        # pm_job
        pm_job_id = uuidutils.generate_uuid()
        pm_job = objects.PmJobV2(
            id=pm_job_id,
            objectType=body["objectType"],
            objectInstanceIds=body["objectInstanceIds"],
            criteria=pm_job_criteria,
            callbackUri=body["callbackUri"],
            reports=[],
        )
        if body.get("subObjectInstanceIds"):
            pm_job.subObjectInstanceIds = body["subObjectInstanceIds"]

        # authentication (internal extension, never exposed via the API)
        auth_req = body.get('authentication')
        if auth_req:
            pm_job.authentication = _check_http_client_auth(auth_req)

        # metadata (internal extension, never exposed via the API)
        metadata = body.get('metadata')
        if metadata:
            pm_job.metadata = metadata

        if CONF.v2_nfvo.test_callback_uri:
            pm_job_utils.test_notification(pm_job)

        # Configure the external monitoring tool before persisting; a
        # plugin failure aborts the creation.
        try:
            self.plugin.create_job(context=context, pm_job=pm_job)
        except sol_ex.PrometheusPluginError as e:
            raise sol_ex.PrometheusSettingFailed from e

        pm_job.create(context)

        location = pm_job_utils.pm_job_href(pm_job.id, self.endpoint)
        resp_body = self._pm_job_view.detail(pm_job)
        return sol_wsgi.SolResponse(201, resp_body,
                                    version=api_version.CURRENT_PM_VERSION,
                                    location=location)

    def index(self, request):
        """List PM jobs (GET /pm_jobs) with filter, selector and paging."""
        filter_param = request.GET.get('filter')
        if filter_param is not None:
            filters = self._pm_job_view.parse_filter(filter_param)
        else:
            filters = None
        # validate_filter

        selector = self._pm_job_view.parse_selector(request.GET)

        page_size = CONF.v2_vnfm.vnfpm_pmjob_page_size
        pager = self._pm_job_view.parse_pager(request, page_size)

        pm_job = pm_job_utils.get_pm_job_all(request.context,
                                             marker=pager.marker)

        resp_body = self._pm_job_view.detail_list(pm_job, filters,
                                                  selector, pager)

        return sol_wsgi.SolResponse(200, resp_body,
                                    version=api_version.CURRENT_PM_VERSION,
                                    link=pager.get_link())

    def show(self, request, id):
        """Show one PM job (GET /pm_jobs/{id})."""
        pm_job = pm_job_utils.get_pm_job(request.context, id)
        pm_job_resp = self._pm_job_view.detail(pm_job)
        return sol_wsgi.SolResponse(200, pm_job_resp,
                                    version=api_version.CURRENT_PM_VERSION)

    @validator.schema(schema.PmJobModificationsRequest_V210, '2.1.0')
    @coordinate.lock_resources('{id}')
    def update(self, request, id, body):
        """Modify callbackUri/authentication (PATCH /pm_jobs/{id})."""
        context = request.context

        pm_job = pm_job_utils.get_pm_job(context, id)
        if body.get("callbackUri"):
            pm_job.callbackUri = body.get("callbackUri")
        if body.get("authentication"):
            pm_job.authentication = _check_http_client_auth(
                body.get("authentication"))

        if CONF.v2_nfvo.test_callback_uri:
            pm_job_utils.test_notification(pm_job)

        # NOTE(review): 'subtransactions' is deprecated in SQLAlchemy 1.4
        # -- confirm the supported SQLAlchemy version before changing.
        with context.session.begin(subtransactions=True):
            pm_job.update(context)

        # Response contains only the attributes that were modified.
        pm_job_modifications = objects.PmJobModificationsV2(
            callbackUri=pm_job.callbackUri,
        )
        resp = pm_job_modifications.to_dict()

        return sol_wsgi.SolResponse(200, resp,
                                    version=api_version.CURRENT_PM_VERSION)

    @coordinate.lock_resources('{id}')
    def delete(self, request, id):
        """Delete a PM job and all its reports (DELETE /pm_jobs/{id})."""
        context = request.context
        pm_job = pm_job_utils.get_pm_job(context, id)

        # Unconfigure the external monitoring tool first.
        self.plugin.delete_job(context=context, pm_job=pm_job)

        reports = objects.PerformanceReportV2.get_by_filter(context,
                                                            jobId=pm_job.id)
        for report in reports:
            report.delete(context)
        pm_job.delete(context)

        return sol_wsgi.SolResponse(204, None,
                                    version=api_version.CURRENT_PM_VERSION)

    def report_get(self, request, id, report_id):
        """Show one PM report (GET /pm_jobs/{id}/reports/{report_id})."""
        pm_report = pm_job_utils.get_pm_report(
            request.context, id, report_id)
        pm_report_resp = self._pm_job_view.report_detail(pm_report)
        return sol_wsgi.SolResponse(200, pm_report_resp,
                                    version=api_version.CURRENT_PM_VERSION)

    def allowed_content_types(self, action):
        """Return the Content-Types accepted for the given action."""
        if action == 'update':
            # Content-Type of Modify request shall be
            # 'application/mergepatch+json' according to SOL spec.
            # But 'application/json' and 'text/plain' is OK for backward
            # compatibility.
            return ['application/mergepatch+json', 'application/json',
                    'text/plain']
        return ['application/json', 'text/plain']

    def supported_api_versions(self, action):
        """Return the PM API versions supported by this controller."""
        return api_version.v2_pm_versions

View File

@ -0,0 +1,54 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.controller import vnflcm_view as base_view
LOG = logging.getLogger(__name__)
CONF = config.CONF
class PmJobViewBuilder(base_view.BaseViewBuilder):
    """Builds REST representations of PM jobs and PM reports."""

    _EXCLUDE_DEFAULT = []

    def __init__(self, endpoint):
        self.endpoint = endpoint

    def detail(self, pm_job, selector=None):
        """Return the PM job as a dict, hiding internal-only fields."""
        # NOTE: _links is not saved in DB. create when it is necessary.
        if not pm_job.obj_attr_is_set('_links'):
            pm_job._links = pm_job_utils.make_pm_job_links(
                pm_job, self.endpoint)
        body = pm_job.to_dict()
        # 'authentication' and 'metadata' are internal extensions and
        # must never be exposed through the API.
        for hidden in ('authentication', 'metadata'):
            if body.get(hidden):
                body.pop(hidden, None)
        if selector is not None:
            body = selector.filter(pm_job, body)
        return body

    def report_detail(self, pm_report):
        """Return the PM report as a dict without DB-only identifiers."""
        body = pm_report.to_dict()
        for hidden in ('id', 'jobId'):
            if body.get(hidden):
                body.pop(hidden)
        return body

View File

@ -152,3 +152,88 @@ class GrantRequestV1(model_base.BASE):
placementConstraints = sa.Column(sa.JSON(), nullable=True)
vimConstraints = sa.Column(sa.JSON(), nullable=True)
additionalParams = sa.Column(sa.JSON(), nullable=True)
class AlarmV1(model_base.BASE):
    """Type: Alarm

    NFV-SOL 003
    - v3.3.1 7.5.2.4 (API version: 1.3.0)
    """

    __tablename__ = 'AlarmV1'

    id = sa.Column(sa.String(255), nullable=False, primary_key=True)
    # Id of the managed object (VNF instance) this alarm relates to.
    managedObjectId = sa.Column(sa.String(255), nullable=False)
    # NOTE: vnfcInstanceIds comes from NFV-SOL 002 v3.3.1 7.5.2.4.
    vnfcInstanceIds = sa.Column(sa.JSON(), nullable=True)
    rootCauseFaultyResource = sa.Column(sa.JSON(), nullable=True)
    alarmRaisedTime = sa.Column(sa.DateTime(), nullable=False)
    alarmChangedTime = sa.Column(sa.DateTime(), nullable=True)
    alarmClearedTime = sa.Column(sa.DateTime(), nullable=True)
    alarmAcknowledgedTime = sa.Column(sa.DateTime(), nullable=True)
    ackState = sa.Column(sa.Enum(
        'UNACKNOWLEDGED', 'ACKNOWLEDGED', create_constraint=True,
        validate_strings=True), nullable=False)
    perceivedSeverity = sa.Column(sa.Enum(
        'CRITICAL', 'MAJOR', 'MINOR', 'WARNING', 'INDETERMINATE', 'CLEARED',
        create_constraint=True, validate_strings=True), nullable=False)
    eventTime = sa.Column(sa.DateTime(), nullable=False)
    eventType = sa.Column(sa.Enum(
        'COMMUNICATIONS_ALARM', 'PROCESSING_ERROR_ALARM',
        'ENVIRONMENTAL_ALARM', 'QOS_ALARM', 'EQUIPMENT_ALARM',
        create_constraint=True, validate_strings=True), nullable=False)
    faultType = sa.Column(sa.String(255), nullable=True)
    probableCause = sa.Column(sa.String(255), nullable=False)
    isRootCause = sa.Column(sa.Boolean, nullable=False)
    correlatedAlarmIds = sa.Column(sa.JSON(), nullable=True)
    faultDetails = sa.Column(sa.JSON(), nullable=True)
class FmSubscriptionV1(model_base.BASE):
    """Type: FmSubscription

    NFV-SOL 003
    - v3.3.1 7.5.2.3 (API version: 1.3.0)
    """

    __tablename__ = 'FmSubscriptionV1'

    id = sa.Column(sa.String(255), nullable=False, primary_key=True)
    # Serialized FmNotificationsFilter (optional).
    filter = sa.Column(sa.JSON(), nullable=True)
    callbackUri = sa.Column(sa.String(255), nullable=False)
    # NOTE: 'authentication' attribute is not included in the
    # original 'FmSubscription' data type definition.
    authentication = sa.Column(sa.JSON(), nullable=True)
class PmJobV2(model_base.BASE):
    """Type: PmJob

    NFV-SOL 003
    - v3.3.1 6.5.2.7 (API version: 2.1.0)
    """

    __tablename__ = 'PmJobV2'

    id = sa.Column(sa.String(255), nullable=False, primary_key=True)
    objectType = sa.Column(sa.String(32), nullable=False)
    objectInstanceIds = sa.Column(sa.JSON(), nullable=False)
    subObjectInstanceIds = sa.Column(sa.JSON(), nullable=True)
    # Serialized VnfPmJobCriteria.
    criteria = sa.Column(sa.JSON(), nullable=False)
    callbackUri = sa.Column(sa.String(255), nullable=False)
    reports = sa.Column(sa.JSON(), nullable=True)
    # NOTE: 'authentication' attribute is not included in the
    # original 'PmJob' data type definition.
    authentication = sa.Column(sa.JSON(), nullable=True)
    # NOTE: 'metadata' attribute is not included in the
    # original 'PmJob' data type definition.
    # 'metadata' is reserved by SQLAlchemy's declarative base, hence the
    # 'metadata__' attribute name mapped onto the 'metadata' column.
    metadata__ = sa.Column("metadata", sa.JSON(), nullable=True)
class PerformanceReportV2(model_base.BASE):
    """Type: Report

    NFV-SOL 003
    - v3.3.1 6.5.2.10 (API version: 2.1.0)
    """

    __tablename__ = 'PerformanceReportV2'

    id = sa.Column(sa.String(255), nullable=False, primary_key=True)
    # Id of the PM job this report belongs to.
    # NOTE(review): primary_key=False is the default and is redundant.
    jobId = sa.Column(sa.String(255), nullable=False, primary_key=False)
    entries = sa.Column(sa.JSON(), nullable=False)

View File

@ -17,8 +17,11 @@
from oslo_log import log as logging
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored.common import fm_subscription_utils as fm_utils
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import lcm_op_occ_utils as lcmocc_utils
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.common import subscription_utils as subsc_utils
from tacker.sol_refactored.common import vnfd_utils
from tacker.sol_refactored.nfvo import local_nfvo
@ -144,3 +147,24 @@ class NfvoClient(object):
if self.is_local:
self.nfvo.recv_lcmocc_notification(context, lcmocc, inst)
def send_alarm_notification(self, context, alarm, inst, endpoint):
    """Notify every FM subscription matching this alarm."""
    for subsc in fm_utils.get_alarm_subscs(context, alarm, inst):
        payload = alarm_utils.make_alarm_notif_data(
            subsc, alarm, endpoint)
        fm_utils.send_notification(subsc, payload)
def send_pm_job_notification(self, report, pm_job, timestamp, endpoint):
    """Send one availability notification per object instance in report."""
    instance_ids = {entry.objectInstanceId for entry in report.entries}
    for instance_id in instance_ids:
        # Collect the sub-object ids reported for this instance, if any.
        sub_instance_ids = []
        for entry in report.entries:
            if (entry.objectInstanceId == instance_id and
                    entry.obj_attr_is_set('subObjectInstanceId')):
                sub_instance_ids.append(entry.subObjectInstanceId)
        payload = pm_job_utils.make_pm_notif_data(
            instance_id, sub_instance_ids, report.id,
            pm_job, timestamp, endpoint)
        pm_job_utils.send_notification(pm_job, payload)

View File

@ -42,8 +42,14 @@ def register_all():
__import__(objects_root + '.common.vnf_ext_cp_config')
__import__(objects_root + '.common.vnf_ext_cp_data')
__import__(objects_root + '.common.vnf_instance_subscription_filter')
__import__(objects_root + '.v1.alarm')
__import__(objects_root + '.v1.alarm_cleared_notification')
__import__(objects_root + '.v1.alarm_notification')
__import__(objects_root + '.v1.constraint_resource_ref')
__import__(objects_root + '.v1.fields')
__import__(objects_root + '.v1.fm_notifications_filter')
__import__(objects_root + '.v1.fm_subscription')
__import__(objects_root + '.v1.fm_subscription_request')
__import__(objects_root + '.v1.grant_info')
__import__(objects_root + '.v1.grant')
__import__(objects_root + '.v1.grant_request')
@ -84,9 +90,15 @@ def register_all():
__import__(objects_root + '.v2.modifications_triggered_by_vnf_pkg_change')
__import__(objects_root + '.v2.monitoring_parameter')
__import__(objects_root + '.v2.operate_vnf_request')
__import__(objects_root +
'.v2.performance_information_available_notification')
__import__(objects_root + '.v2.pkgm_links')
__import__(objects_root + '.v2.pkgm_notification_filter')
__import__(objects_root + '.v2.pkgm_subscription_request')
__import__(objects_root + '.v2.pm_job')
__import__(objects_root + '.v2.pm_job_criteria')
__import__(objects_root + '.v2.pm_job_modification')
__import__(objects_root + '.v2.pm_report')
__import__(objects_root + '.v2.revert_to_vnf_snapshot_request')
__import__(objects_root + '.v2.scale_info')
__import__(objects_root + '.v2.scale_vnf_request')

View File

@ -0,0 +1,84 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
from tacker.sol_refactored.objects.v1 import fields as v1fields
# NFV-SOL 003
# - v3.3.1 7.5.2.4 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class AlarmV1(base.TackerPersistentObject, base.TackerObjectDictCompat):
    """Versioned object for the Alarm data type (SOL003 7.5.2.4)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'managedObjectId': fields.StringField(nullable=False),
        # NOTE: vnfcInstanceIds is defined in NFV-SOL 002 v3.3.1 7.5.2.4
        'vnfcInstanceIds': fields.ListOfStringsField(nullable=True),
        'rootCauseFaultyResource': fields.ObjectField(
            'AlarmV1_FaultyResourceInfo', nullable=True),
        'alarmRaisedTime': fields.DateTimeField(nullable=False),
        'alarmChangedTime': fields.DateTimeField(nullable=True),
        'alarmClearedTime': fields.DateTimeField(nullable=True),
        'alarmAcknowledgedTime': fields.DateTimeField(nullable=True),
        'ackState': fields.EnumField(
            valid_values=[
                'UNACKNOWLEDGED',
                'ACKNOWLEDGED',
            ],
            nullable=False),
        'perceivedSeverity': v1fields.PerceivedSeverityTypeField(
            nullable=False),
        'eventTime': fields.DateTimeField(nullable=False),
        'eventType': v1fields.EventTypeField(nullable=False),
        'faultType': fields.StringField(nullable=True),
        'probableCause': fields.StringField(nullable=False),
        'isRootCause': fields.BooleanField(nullable=False),
        'correlatedAlarmIds': fields.ListOfStringsField(nullable=True),
        'faultDetails': fields.ListOfStringsField(nullable=True),
        '_links': fields.ObjectField('AlarmV1_Links', nullable=False),
    }
@base.TackerObjectRegistry.register
class AlarmV1_FaultyResourceInfo(base.TackerObject,
                                 base.TackerObjectDictCompat):
    """FaultyResourceInfo sub-structure of Alarm (SOL003 7.5.2.4)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'faultyResource': fields.ObjectField(
            'ResourceHandle', nullable=False),
        'faultyResourceType': v1fields.FaultyResourceTypeField(
            nullable=False)
    }


@base.TackerObjectRegistry.register
class AlarmV1_Links(base.TackerObject):
    """'_links' sub-structure of Alarm (SOL003 7.5.2.4)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'self': fields.ObjectField('Link', nullable=False),
        'objectInstance': fields.ObjectField('Link', nullable=True)
    }

View File

@ -0,0 +1,50 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 7.5.2.6 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class AlarmClearedNotificationV1(base.TackerObject,
                                 base.TackerObjectDictCompat):
    """AlarmClearedNotification data type (SOL003 7.5.2.6)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'notificationType': fields.StringField(nullable=False),
        'subscriptionId': fields.StringField(nullable=False),
        'timeStamp': fields.DateTimeField(nullable=False),
        'alarmId': fields.StringField(nullable=False),
        'alarmClearedTime': fields.DateTimeField(nullable=False),
        '_links': fields.ObjectField(
            'AlarmClearedNotificationV1_Links', nullable=False)
    }


@base.TackerObjectRegistry.register
class AlarmClearedNotificationV1_Links(base.TackerObject):
    """'_links' sub-structure of AlarmClearedNotification."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'subscription': fields.ObjectField('NotificationLink', nullable=False),
        'alarm': fields.ObjectField('NotificationLink', nullable=True)
    }

View File

@ -0,0 +1,47 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 7.5.2.5 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class AlarmNotificationV1(base.TackerObject, base.TackerObjectDictCompat):
    """AlarmNotification data type (SOL003 7.5.2.5)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'notificationType': fields.StringField(nullable=False),
        'subscriptionId': fields.StringField(nullable=False),
        'timeStamp': fields.DateTimeField(nullable=False),
        'alarm': fields.ObjectField('AlarmV1', nullable=False),
        '_links': fields.ObjectField(
            'AlarmNotificationV1_Links', nullable=False)
    }


@base.TackerObjectRegistry.register
class AlarmNotificationV1_Links(base.TackerObject):
    """'_links' sub-structure of AlarmNotification."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'subscription': fields.ObjectField('NotificationLink', nullable=False)
    }

View File

@ -41,3 +41,60 @@ class GrantedLcmOperationType(fields.BaseTackerEnum):
class GrantedLcmOperationTypeField(fields.BaseEnumField):
AUTO_TYPE = GrantedLcmOperationType()
# NFV-SOL 003
# - v2.6.1 7.5.4.3 (API version: 1.2.0)
# - v2.7.1 7.5.4.3 (API version: 1.3.0)
# - v2.8.1 7.5.4.3 (API version: 1.3.0)
# - v3.3.1 7.5.4.3 (API version: 1.3.0)
class PerceivedSeverityType(fields.BaseTackerEnum):
    # Enumeration of alarm perceived severities (ITU-T X.733 style).
    CRITICAL = 'CRITICAL'
    MAJOR = 'MAJOR'
    MINOR = 'MINOR'
    WARNING = 'WARNING'
    INDETERMINATE = 'INDETERMINATE'
    CLEARED = 'CLEARED'

    ALL = (CRITICAL, MAJOR, MINOR, WARNING, INDETERMINATE, CLEARED)


class PerceivedSeverityTypeField(fields.BaseEnumField):
    # Versioned-object field wrapping PerceivedSeverityType.
    AUTO_TYPE = PerceivedSeverityType()
# NFV-SOL 003
# - v2.6.1 7.5.4.5 (API version: 1.2.0)
# - v2.7.1 7.5.4.5 (API version: 1.3.0)
# - v2.8.1 7.5.4.5 (API version: 1.3.0)
# - v3.3.1 7.5.4.5 (API version: 1.3.0)
class EventType(fields.BaseTackerEnum):
    # Enumeration of fault event categories.
    COMMUNICATIONS_ALARM = 'COMMUNICATIONS_ALARM'
    PROCESSING_ERROR_ALARM = 'PROCESSING_ERROR_ALARM'
    ENVIRONMENTAL_ALARM = 'ENVIRONMENTAL_ALARM'
    QOS_ALARM = 'QOS_ALARM'
    EQUIPMENT_ALARM = 'EQUIPMENT_ALARM'

    ALL = (COMMUNICATIONS_ALARM, PROCESSING_ERROR_ALARM,
           ENVIRONMENTAL_ALARM, QOS_ALARM, EQUIPMENT_ALARM)


class EventTypeField(fields.BaseEnumField):
    # Versioned-object field wrapping EventType.
    AUTO_TYPE = EventType()
# NFV-SOL 003
# - v2.6.1 7.5.4.5 (API version: 1.2.0)
# - v2.7.1 7.5.4.5 (API version: 1.3.0)
# - v2.8.1 7.5.4.5 (API version: 1.3.0)
# - v3.3.1 7.5.4.5 (API version: 1.3.0)
# NOTE(review): the clause number above duplicates EventType's; it looks
# copy-pasted -- verify the correct clause for FaultyResourceType in the
# SOL003 spec.
class FaultyResourceType(fields.BaseTackerEnum):
    # Enumeration of the kind of virtualised resource at fault.
    COMPUTE = 'COMPUTE'
    STORAGE = 'STORAGE'
    NETWORK = 'NETWORK'

    ALL = (COMPUTE, STORAGE, NETWORK)


class FaultyResourceTypeField(fields.BaseEnumField):
    # Versioned-object field wrapping FaultyResourceType.
    AUTO_TYPE = FaultyResourceType()

View File

@ -0,0 +1,47 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
from tacker.sol_refactored.objects.v1 import fields as v1fields
# NFV-SOL 003
# - v3.3.1 7.5.3.2 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class FmNotificationsFilterV1(base.TackerObject, base.TackerObjectDictCompat):
    """FmNotificationsFilter data type (SOL003 7.5.3.2).

    All attributes are optional; an absent attribute matches everything.
    """

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'vnfInstanceSubscriptionFilter': fields.ObjectField(
            'VnfInstanceSubscriptionFilter', nullable=True),
        'notificationTypes': fields.ListOfEnumField(
            valid_values=[
                'AlarmNotification',
                'AlarmClearedNotification',
                'AlarmListRebuiltNotification',
            ],
            nullable=True,
        ),
        'faultyResourceTypes': fields.Field(fields.List(
            v1fields.FaultyResourceTypeField(), nullable=True)),
        'perceivedSeverities': fields.Field(fields.List(
            v1fields.PerceivedSeverityTypeField(), nullable=True)),
        'eventTypes': fields.Field(fields.List(
            v1fields.EventTypeField(), nullable=True)),
        'probableCauses': fields.ListOfStringsField(nullable=True)
    }

View File

@ -0,0 +1,52 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 7.5.2.3 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class FmSubscriptionV1(base.TackerPersistentObject,
                       base.TackerObjectDictCompat):
    """FmSubscription data type (SOL003 7.5.2.3)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        'filter': fields.ObjectField(
            'FmNotificationsFilterV1', nullable=True),
        'callbackUri': fields.UriField(nullable=False),
        # NOTE: 'authentication' attribute is not included in the
        # original 'FmSubscription' data type definition.
        # It is necessary to keep this to be used at sending
        # notifications. Note that it is dropped at GET subscription.
        'authentication': fields.ObjectField(
            'SubscriptionAuthentication', nullable=True),
        '_links': fields.ObjectField(
            'FmSubscriptionV1_Links', nullable=False),
    }


@base.TackerObjectRegistry.register
class FmSubscriptionV1_Links(base.TackerObject, base.TackerObjectDictCompat):
    """'_links' sub-structure of FmSubscription."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'self': fields.ObjectField('Link', nullable=False),
    }

View File

@ -0,0 +1,34 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 7.5.2.2 (API version: 1.3.0)
@base.TackerObjectRegistry.register
class FmSubscriptionRequestV1(base.TackerObject, base.TackerObjectDictCompat):
    """FmSubscriptionRequest data type (SOL003 7.5.2.2)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'filter': fields.ObjectField(
            'FmNotificationsFilterV1', nullable=True),
        'callbackUri': fields.UriField(nullable=False),
        'authentication': fields.ObjectField(
            'SubscriptionAuthentication', nullable=True)
    }

View File

@ -0,0 +1,36 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.2.6 (API version: 2.1.0)
@base.TackerObjectRegistry.register
class CreatePmJobRequestV2(base.TackerObject,
                           base.TackerObjectDictCompat):
    """CreatePmJobRequest: request body of POST /vnfpm/v2/pm_jobs."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        # Type of the measured object (e.g. "Vnf").
        'objectType': fields.StringField(nullable=False),
        # VNF instances the PM job applies to.
        'objectInstanceIds': fields.ListOfStringsField(nullable=False),
        'subObjectInstanceIds': fields.ListOfStringsField(nullable=True),
        # What to collect and how often (VnfPmJobCriteriaV2).
        'criteria': fields.ObjectField('VnfPmJobCriteriaV2', nullable=False),
        'callbackUri': fields.UriField(nullable=False),
        # Credentials presumably used when sending notifications to
        # callbackUri — confirm against the PM controller code.
        'authentication': fields.ObjectField(
            'SubscriptionAuthentication', nullable=True),
    }

View File

@ -0,0 +1,62 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.2.5 (API version: 2.1.0)
@base.TackerObjectRegistry.register
class PerformanceInformationAvailableNotificationV2(
    base.TackerObject,
    base.TackerObjectDictCompat
):
    """Notification that performance information is available for a PM job."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        # Discriminator for the notification payload type.
        'notificationType': fields.StringField(nullable=False),
        'timeStamp': fields.DateTimeField(nullable=False),
        # PM job that produced the information.
        'pmJobId': fields.StringField(nullable=False),
        'objectType': fields.StringField(nullable=False),
        'objectInstanceId': fields.StringField(nullable=False),
        'subObjectInstanceIds': fields.ListOfStringsField(nullable=True),
        '_links': fields.ObjectField(
            'PerformanceInformationAvailableNotificationV2_Links',
            nullable=False),
    }
@base.TackerObjectRegistry.register
class PerformanceInformationAvailableNotificationV2_Links(
    base.TackerObject,
    base.TackerObjectDictCompat
):
    """'_links' of PerformanceInformationAvailableNotification."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        # Link to the measured VNF instance (optional).
        'objectInstance': fields.ObjectField(
            'NotificationLink', nullable=True),
        # Link to the PM job resource.
        'pmJob': fields.ObjectField(
            'NotificationLink', nullable=False),
        # Link to the newly available performance report.
        'performanceReport': fields.ObjectField(
            'NotificationLink', nullable=False),
    }

View File

@ -0,0 +1,75 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.2.7 (API version: 2.1.0)
@base.TackerObjectRegistry.register
class PmJobV2(base.TackerPersistentObject, base.TackerObjectDictCompat):
    """PmJob: an individual performance monitoring job (persisted)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'id': fields.StringField(nullable=False),
        # Type of the measured object (e.g. "Vnf").
        'objectType': fields.StringField(nullable=False),
        'objectInstanceIds': fields.ListOfStringsField(nullable=False),
        'subObjectInstanceIds': fields.ListOfStringsField(nullable=True),
        # Collection/reporting criteria for this job.
        'criteria': fields.ObjectField('VnfPmJobCriteriaV2', nullable=False),
        'callbackUri': fields.UriField(nullable=False),
        # Reports produced so far for this job.
        'reports': fields.ListOfObjectsField(
            'VnfPmJobV2_Reports', nullable=False),
        '_links': fields.ObjectField(
            'VnfPmJobV2_Links', nullable=False),
        # NOTE: 'authentication' attribute is not included in the
        # original 'PmJob' data type definition.
        # It is necessary to keep this to be used at sending
        # notifications. Note that it is dropped at GET subscription.
        'authentication': fields.ObjectField(
            'SubscriptionAuthentication', nullable=True),
        # NOTE: 'metadata' attribute is not included in the
        # original 'PmJob' data type definition.
        # It is necessary to keep this to be used at setting prometheus config.
        'metadata': fields.KeyValuePairsField(nullable=True),
    }
@base.TackerObjectRegistry.register
class VnfPmJobV2_Reports(base.TackerObject, base.TackerObjectDictCompat):
    """'reports' entry of PmJob: one available performance report."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        # URI from which the report can be fetched.
        'href': fields.UriField(nullable=False),
        # When the report became available.
        'readyTime': fields.DateTimeField(nullable=False),
        'expiryTime': fields.DateTimeField(nullable=True),
        # Report size — assumed bytes; TODO confirm against SOL 003.
        'fileSize': fields.IntegerField(nullable=True),
    }
@base.TackerObjectRegistry.register
class VnfPmJobV2_Links(base.TackerObject, base.TackerObjectDictCompat):
    """'_links' sub-structure of PmJob."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        # URI of this PM job resource.
        'self': fields.ObjectField('Link', nullable=False),
        # Links to the measured object instances (optional).
        'objects': fields.ListOfObjectsField('Link', nullable=True),
    }

View File

@ -0,0 +1,34 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.3.3 (API version: 2.0.0)
@base.TackerObjectRegistry.register
class VnfPmJobCriteriaV2(base.TackerObject, base.TackerObjectDictCompat):
    """PmJobCriteria: what to collect and how often.

    NOTE(review): both 'performanceMetric' and 'performanceMetricGroup'
    are nullable here; presumably at least one must be supplied — confirm
    in the request-validation layer.
    """

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'performanceMetric': fields.ListOfStringsField(nullable=True),
        'performanceMetricGroup': fields.ListOfStringsField(nullable=True),
        # Periods are integers — assumed seconds; TODO confirm per SOL 003.
        'collectionPeriod': fields.IntegerField(nullable=False),
        'reportingPeriod': fields.IntegerField(nullable=False),
        # Optional cut-off time after which no new reports are produced.
        'reportingBoundary': fields.DateTimeField(nullable=True),
    }

View File

@ -0,0 +1,32 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.2.12 (API version: 2.1.0)
@base.TackerObjectRegistry.register
class PmJobModificationsV2(base.TackerObject, base.TackerObjectDictCompat):
    """PmJobModifications: request body of PATCH /vnfpm/v2/pm_jobs/{id}.

    NOTE(review): both attributes are declared nullable=False; per the
    data-type definition each modification attribute is optional, and
    presence (vs. value nullability) is governed by the API layer —
    confirm against the PM controller validation.
    """

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        # UriField (not StringField) for consistency with
        # CreatePmJobRequestV2 and PmJobV2, which both declare
        # 'callbackUri' as a URI.
        'callbackUri': fields.UriField(nullable=False),
        # New credentials for delivering notifications.
        'authentication': fields.ObjectField(
            'SubscriptionAuthentication', nullable=False),
    }

View File

@ -0,0 +1,65 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker.sol_refactored.objects import base
from tacker.sol_refactored.objects import fields
# NFV-SOL 003
# - v3.3.1 6.5.2.10 (API version: 2.1.0)
@base.TackerObjectRegistry.register
class PerformanceReportV2(base.TackerPersistentObject,
                          base.TackerObjectDictCompat):
    """PerformanceReport produced by a PM job (persisted)."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    # PerformanceReportV2 need 'id' and 'jobId'
    fields = {
        'id': fields.StringField(nullable=False),
        # ID of the PmJob this report belongs to.
        'jobId': fields.StringField(nullable=False),
        # One entry per (object, metric) combination.
        'entries': fields.ListOfObjectsField(
            'VnfPmReportV2_Entries', nullable=False),
    }
@base.TackerObjectRegistry.register
class VnfPmReportV2_Entries(base.TackerObject, base.TackerObjectDictCompat):
    """One entry of PerformanceReport: values of one metric on one object."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'objectType': fields.StringField(nullable=False),
        'objectInstanceId': fields.StringField(nullable=False),
        'subObjectInstanceId': fields.StringField(nullable=True),
        # Name of the collected metric.
        'performanceMetric': fields.StringField(nullable=False),
        # Time-stamped values collected for the metric.
        'performanceValues': fields.ListOfObjectsField(
            'VnfPmReportV2_Entries_PerformanceValues', nullable=False),
    }
@base.TackerObjectRegistry.register
class VnfPmReportV2_Entries_PerformanceValues(base.TackerObject,
                                              base.TackerObjectDictCompat):
    """One time-stamped performance value within a report entry."""

    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'timeStamp': fields.DateTimeField(nullable=False),
        # Measured value, stored as a string regardless of metric type.
        'value': fields.StringField(nullable=False),
        # Optional measurement context key/value pairs.
        'context': fields.KeyValuePairsField(nullable=True),
    }

View File

@ -189,7 +189,28 @@ def PrepareRequestHandler(manager):
inspect.currentframe().f_code.co_name))
def do_PUT(self):
raise NotImplementedError
"""Process PUT request"""
LOG.debug(
'[Start] %s.%s()' %
(self.__class__.__name__,
inspect.currentframe().f_code.co_name))
# URI might have trailing uuid or not.
if self._is_match_with_list():
# Request is registered in our list.
tplUri = urlparse(self.path)
self._returned_callback(tplUri.path,
manager._funcs_puts[tplUri.path])
else:
# Unregistered URI is requested
LOG.debug('PUT Recv. Unknown URL: "%s"' % self.path)
self.send_response(http.HTTPStatus.BAD_REQUEST)
self.end_headers()
LOG.debug(
'[ End ] %s.%s()' %
(self.__class__.__name__,
inspect.currentframe().f_code.co_name))
return DummyRequestHandler

View File

@ -53,6 +53,10 @@ class BaseVnfLcmKubernetesV2OidcTest(base_v2.BaseVnfLcmKubernetesV2Test):
)
cls.tacker_client = http_client.HttpClient(auth)
@classmethod
def tearDownClass(cls):
    """Class-level teardown for the OIDC-auth test variant.

    NOTE(review): super() is given BaseVnfLcmKubernetesV2Test explicitly,
    which skips that class's own tearDownClass (the one that stops the
    shared fake server) — presumably intentional because this variant
    does not start it; confirm against its setUpClass.
    """
    super(base_v2.BaseVnfLcmKubernetesV2Test, cls).tearDownClass()
@classmethod
def get_k8s_vim_info(cls):
vim_params = yaml.safe_load(

View File

@ -16,6 +16,7 @@ import os
import shutil
import tempfile
import time
import urllib
from oslo_config import cfg
from oslo_utils import uuidutils
@ -24,10 +25,14 @@ import yaml
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored import objects
from tacker.tests.functional.common.fake_server import FakeServerManager
from tacker.tests.functional.sol_v2_common import utils
from tacker.tests import utils as base_utils
from tacker import version
# Shared fake HTTP server that receives FM/PM notification callbacks
# during functional tests.
FAKE_SERVER_MANAGER = FakeServerManager()
# Base path registered on the fake server for notification callbacks;
# each test appends its own method name.
MOCK_NOTIFY_CALLBACK_URL = '/notification/callback'

# Timeouts (seconds) for long-running LCM operations.
VNF_PACKAGE_UPLOAD_TIMEOUT = 300
VNF_INSTANTIATE_TIMEOUT = 600
VNF_TERMINATE_TIMEOUT = 600
@ -41,6 +46,9 @@ class BaseVnfLcmKubernetesV2Test(base.BaseTestCase):
super(BaseVnfLcmKubernetesV2Test, cls).setUpClass()
"""Base test case class for SOL v2 kubernetes functional tests."""
FAKE_SERVER_MANAGER.prepare_http_server()
FAKE_SERVER_MANAGER.start_server()
cfg.CONF(args=['--config-file', '/etc/tacker/tacker.conf'],
project='tacker',
version='%%prog %s' % version.version_info.release_string())
@ -61,6 +69,22 @@ class BaseVnfLcmKubernetesV2Test(base.BaseTestCase):
project_domain_name=vim_info.accessInfo['projectDomain']
)
cls.tacker_client = http_client.HttpClient(auth)
cls.fake_prometheus_ip = cls.get_controller_tacker_ip()
@classmethod
def tearDownClass(cls):
    """Stop the shared fake notification server after the class runs."""
    super(BaseVnfLcmKubernetesV2Test, cls).tearDownClass()
    FAKE_SERVER_MANAGER.stop_server()

def setUp(self):
    """Register a per-test callback endpoint on the fake server.

    The callback path is suffixed with the test method name so each
    test gets an isolated notification history; both POST and GET on
    the path answer 204.
    """
    super().setUp()
    callback_url = os.path.join(
        MOCK_NOTIFY_CALLBACK_URL,
        self._testMethodName)
    FAKE_SERVER_MANAGER.clear_history(callback_url)
    FAKE_SERVER_MANAGER.set_callback('POST', callback_url, status_code=204)
    FAKE_SERVER_MANAGER.set_callback('GET', callback_url, status_code=204)
@classmethod
def get_vim_info(cls):
@ -151,6 +175,35 @@ class BaseVnfLcmKubernetesV2Test(base.BaseTestCase):
return pkg_id, vnfd_id
@classmethod
def get_controller_tacker_ip(cls):
    """Extract the controller IP used as the fake Prometheus host.

    Reads tools/test-setup-fake-prometheus-server.sh and returns the
    host part of its TEST_REMOTE_URI assignment (text between
    'http://' and the closing double quote).
    """
    base_dir = os.path.dirname(__file__)
    script_path = os.path.join(
        base_dir,
        "../../../../tools/test-setup-fake-prometheus-server.sh")
    with open(script_path, 'r') as fp:
        script = fp.read()
    after_key = script.split('TEST_REMOTE_URI')[1]
    return after_key.split('http://')[1].split('"')[0]
def assert_notification_get(self, callback_url):
    """Assert exactly one 204 notification arrived at callback_url.

    Consumes (clears) the fake server history for the URL so later
    checks in the same test start from a clean slate.
    """
    notify_mock_responses = FAKE_SERVER_MANAGER.get_history(
        callback_url)
    FAKE_SERVER_MANAGER.clear_history(
        callback_url)
    self.assertEqual(1, len(notify_mock_responses))
    self.assertEqual(204, notify_mock_responses[0].status_code)

def _check_notification(self, callback_url, notify_type):
    """Assert one 204 notification of the given type arrived.

    Like assert_notification_get, but also checks the request body's
    'notificationType'. Consumes the history for the URL.
    """
    notify_mock_responses = FAKE_SERVER_MANAGER.get_history(
        callback_url)
    FAKE_SERVER_MANAGER.clear_history(
        callback_url)
    self.assertEqual(1, len(notify_mock_responses))
    self.assertEqual(204, notify_mock_responses[0].status_code)
    self.assertEqual(notify_type, notify_mock_responses[0].request_body[
        'notificationType'])
@classmethod
def delete_vnf_package(cls, pkg_id):
path = f"/vnfpkgm/v1/vnf_packages/{pkg_id}"
@ -233,6 +286,92 @@ class BaseVnfLcmKubernetesV2Test(base.BaseTestCase):
return self.tacker_client.do_request(
path, "GET", version="2.0.0")
def create_subscription(self, req_body):
    """POST /vnffm/v1/subscriptions: create an FM subscription."""
    path = "/vnffm/v1/subscriptions"
    return self.tacker_client.do_request(
        path, "POST", body=req_body, version="1.3.0")

def list_subscriptions(self, filter_expr=None):
    """GET /vnffm/v1/subscriptions with optional attribute filter."""
    path = "/vnffm/v1/subscriptions"
    if filter_expr:
        path = "{}?{}".format(path, urllib.parse.urlencode(filter_expr))
    return self.tacker_client.do_request(
        path, "GET", version="1.3.0")

def show_subscription(self, subscription_id):
    """GET an individual FM subscription."""
    path = f"/vnffm/v1/subscriptions/{subscription_id}"
    return self.tacker_client.do_request(
        path, "GET", version="1.3.0")

def delete_subscription(self, subscription_id):
    """DELETE an individual FM subscription."""
    path = f"/vnffm/v1/subscriptions/{subscription_id}"
    return self.tacker_client.do_request(
        path, "DELETE", version="1.3.0")

def create_fm_alarm(self, req_body):
    """POST /alert: feed a Prometheus alert into the FM plugin."""
    path = "/alert"
    return self.tacker_client.do_request(
        path, "POST", body=req_body, version="1.3.0")

def list_fm_alarm(self, filter_expr=None):
    """GET /vnffm/v1/alarms with optional attribute filter."""
    path = "/vnffm/v1/alarms"
    if filter_expr:
        path = "{}?{}".format(path, urllib.parse.urlencode(filter_expr))
    return self.tacker_client.do_request(
        path, "GET", version="1.3.0")

def show_fm_alarm(self, alarm_id):
    """GET an individual FM alarm."""
    path = f"/vnffm/v1/alarms/{alarm_id}"
    return self.tacker_client.do_request(
        path, "GET", version="1.3.0")

def update_fm_alarm(self, alarm_id, req_body):
    """PATCH an FM alarm (e.g. acknowledge it)."""
    path = f"/vnffm/v1/alarms/{alarm_id}"
    return self.tacker_client.do_request(
        path, "PATCH", body=req_body, version="1.3.0")

def create_pm_job(self, req_body):
    """POST /vnfpm/v2/pm_jobs: create a PM job."""
    path = "/vnfpm/v2/pm_jobs"
    return self.tacker_client.do_request(
        path, "POST", body=req_body, version="2.1.0")

def update_pm_job(self, pm_job_id, req_body):
    """PATCH a PM job (callbackUri/authentication)."""
    path = f"/vnfpm/v2/pm_jobs/{pm_job_id}"
    return self.tacker_client.do_request(
        path, "PATCH", body=req_body, version="2.1.0")

def create_pm_event(self, req_body):
    """POST /pm_event: feed a Prometheus PM event into the PM plugin."""
    path = "/pm_event"
    return self.tacker_client.do_request(
        path, "POST", body=req_body, version="2.1.0")

def list_pm_job(self, filter_expr=None):
    """GET /vnfpm/v2/pm_jobs with optional attribute filter."""
    path = "/vnfpm/v2/pm_jobs"
    if filter_expr:
        path = "{}?{}".format(path, urllib.parse.urlencode(filter_expr))
    return self.tacker_client.do_request(
        path, "GET", version="2.1.0")

def show_pm_job(self, pm_job_id):
    """GET an individual PM job."""
    path = f"/vnfpm/v2/pm_jobs/{pm_job_id}"
    return self.tacker_client.do_request(
        path, "GET", version="2.1.0")

def show_pm_job_report(self, pm_job_id, report_id):
    """GET an individual performance report of a PM job."""
    path = f"/vnfpm/v2/pm_jobs/{pm_job_id}/reports/{report_id}"
    return self.tacker_client.do_request(
        path, "GET", version="2.1.0")

def delete_pm_job(self, pm_job_id):
    """DELETE an individual PM job."""
    path = f"/vnfpm/v2/pm_jobs/{pm_job_id}"
    return self.tacker_client.do_request(
        path, "DELETE", version="2.1.0")

def prometheus_auto_scaling_alert(self, req_body):
    """POST /alert/vnf_instances: feed an auto-scaling alert.

    NOTE(review): unlike the other helpers this sends no API 'version'
    header — presumably the alert endpoint is unversioned; confirm.
    """
    path = "/alert/vnf_instances"
    return self.tacker_client.do_request(
        path, "POST", body=req_body)
def _check_resp_headers(self, resp, supported_headers):
unsupported_headers = ['Link', 'Retry-After',
'Content-Range', 'WWW-Authenticate']

View File

@ -407,3 +407,328 @@ def helm_error_handling_change_vnfpkg(vnfd_id):
}]
}
}
def instantiate_cnf_resources_create(vnfd_id):
    """CreateVnfRequest body for the CNF-resources test package."""
    request = {
        "vnfdId": vnfd_id,
        "vnfInstanceName": "test",
        "vnfInstanceDescription": "test",
    }
    request["metadata"] = {"dummy-key": "dummy-val"}
    return request
def pm_instantiate_cnf_resources_create(vnfd_id):
    """CreateVnfRequest body for PM tests (no metadata attribute)."""
    return dict(
        vnfdId=vnfd_id,
        vnfInstanceName="test",
        vnfInstanceDescription="test",
    )
def instantiate_vnf_min():
    """InstantiateVnfRequest body with only required attributes.

    NOTE: Only cardinality 1 / 1..N (1) attributes are set.
    """
    return dict(flavourId="simple")
def sub_create_min(callback_uri):
    """FM subscription create request with only required attributes.

    NOTE: Only cardinality 1 / 1..N (1) attributes are set.
    """
    return dict(callbackUri=callback_uri)
def sub_create_max(callback_uri, vnfd_id, inst_id):
    """FM subscription create request with every optional attribute set."""
    product = {
        "vnfProductName": "Sample VNF",
        "versions": [
            {
                "vnfSoftwareVersion": "1.0",
                "vnfdVersions": ["1.0"],
            }
        ],
    }
    instance_filter = {
        "vnfdIds": [vnfd_id],
        "vnfProductsFromProviders": [
            {
                "vnfProvider": "Company",
                "vnfProducts": [product],
            },
        ],
        "vnfInstanceIds": [inst_id],
        "vnfInstanceNames": ["test"],
    }
    return {
        "filter": {
            "vnfInstanceSubscriptionFilter": instance_filter,
            "notificationTypes": ["AlarmNotification",
                                  "AlarmClearedNotification"],
            "faultyResourceTypes": ["COMPUTE"],
            "perceivedSeverities": ["WARNING"],
            "eventTypes": ["PROCESSING_ERROR_ALARM"],
            "probableCauses": ["Process Terminated"],
        },
        "callbackUri": callback_uri,
    }
def alert_event_firing(inst_id, pod_name):
    """Prometheus AlertManager 'firing' FM alert payload for a Pod."""
    labels = {
        "receiver_type": "tacker",
        "function_type": "vnffm",
        "vnf_instance_id": inst_id,
        "pod": pod_name,
        "perceived_severity": "WARNING",
        "event_type": "PROCESSING_ERROR_ALARM",
    }
    annotations = {
        "fault_type": "Server Down",
        "probable_cause": "Process Terminated",
        "fault_details": "pid 12345",
    }
    alert = {
        "status": "firing",
        "labels": labels,
        "annotations": annotations,
        "startsAt": "2022-06-21T23:47:36.453Z",
        # A zero endsAt means the alert is still active.
        "endsAt": "0001-01-01T00:00:00Z",
        "generatorURL": "http://controller147:9090/graph?g0.expr="
                        "up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1",
        "fingerprint": "5ef77f1f8a3ecb8d",
    }
    return {
        "receiver": "receiver",
        "status": "firing",
        "alerts": [alert],
        "groupLabels": {},
        "commonLabels": {"alertname": "NodeInstanceDown", "job": "node"},
        "commonAnnotations": {"description": "sample"},
        "externalURL": "http://controller147:9093",
        "version": "4",
        "groupKey": "{}:{}",
        "truncatedAlerts": 0,
    }
def alert_event_resolved(inst_id, pod_name):
    """Prometheus AlertManager 'resolved' FM alert payload for a Pod."""
    labels = {
        "receiver_type": "tacker",
        "function_type": "vnffm",
        "vnf_instance_id": inst_id,
        "pod": pod_name,
        "perceived_severity": "WARNING",
        "event_type": "PROCESSING_ERROR_ALARM",
    }
    annotations = {
        "fault_type": "Server Down",
        "probable_cause": "Process Terminated",
        "fault_details": "pid 12345",
    }
    alert = {
        "status": "resolved",
        "labels": labels,
        "annotations": annotations,
        "startsAt": "2022-06-21T23:47:36.453Z",
        # A real endsAt timestamp marks the alert as resolved.
        "endsAt": "2022-06-22T23:47:36.453Z",
        "generatorURL": "http://controller147:9090/graph?g0.expr=up%7B"
                        "job%3D%22node%22%7D+%3D%3D+0&g0.tab=1",
        "fingerprint": "5ef77f1f8a3ecb8d",
    }
    return {
        "receiver": "receiver",
        "status": "resolved",
        "alerts": [alert],
        "groupLabels": {},
        "commonLabels": {"alertname": "NodeInstanceDown", "job": "node"},
        "commonAnnotations": {"description": "sample"},
        "externalURL": "http://controller147:9093",
        "version": "4",
        "groupKey": "{}:{}",
        "truncatedAlerts": 0,
    }
def update_alarm():
    """AlarmModifications body that acknowledges an alarm."""
    return dict(ackState="ACKNOWLEDGED")
def terminate_vnf_min():
    """TerminateVnfRequest body with only required attributes.

    NOTE: Only cardinality 1 / 1..N (1) attributes are set.
    """
    return dict(terminationType="FORCEFUL")
def pm_job_min(callback_uri, inst_id, host_ip):
    """CreatePmJobRequest body with only required attributes set."""
    target = {
        "prometheusHost": host_ip,
        "prometheusHostPort": 50022,
        "authInfo": {
            "ssh_username": "root",
            "ssh_password": "root",
        },
        "alertRuleConfigPath": "/tmp",
        "prometheusReloadApiEndpoint": "http://localhost:9990/-/reload",
    }
    return {
        "objectType": "Vnf",
        "objectInstanceIds": [inst_id],
        "criteria": {
            "performanceMetric": [f"VCpuUsageMeanVnf.{inst_id}"],
            "collectionPeriod": 5,
            "reportingPeriod": 10,
        },
        "callbackUri": callback_uri,
        "metadata": {
            "monitoring": {
                "monitorName": "prometheus",
                "driverType": "external",
                "targetsInfo": [target],
            }
        },
    }
def pm_job_max(callback_uri, inst_id, host_ip):
    """CreatePmJobRequest body with all attributes set."""
    target = {
        "prometheusHost": host_ip,
        "prometheusHostPort": 50022,
        "authInfo": {
            "ssh_username": "root",
            "ssh_password": "root",
        },
        "alertRuleConfigPath": "/tmp",
        "prometheusReloadApiEndpoint": "http://localhost:9990/-/reload",
    }
    criteria = {
        "performanceMetric": [f"VCpuUsageMeanVnf.{inst_id}"],
        "performanceMetricGroup": ["VirtualisedComputeResource"],
        "collectionPeriod": 5,
        "reportingPeriod": 10,
        "reportingBoundary": "2099-08-05T02:24:46Z",
    }
    return {
        "objectType": "Vnf",
        "objectInstanceIds": [inst_id],
        "subObjectInstanceIds": [],
        "criteria": criteria,
        "callbackUri": callback_uri,
        "metadata": {
            "monitoring": {
                "monitorName": "prometheus",
                "driverType": "external",
                "targetsInfo": [target],
            }
        },
    }
def update_pm_job(callback_uri):
    """PmJobModifications body updating only the callback URI."""
    return dict(callbackUri=callback_uri)
def pm_event(job_id, inst_id):
    """Prometheus PM event payload reporting a metric value for a job."""
    labels = {
        "receiver_type": "tacker",
        "function_type": "vnfpm",
        "job_id": job_id,
        "metric": f"VCpuUsageMeanVnf.{inst_id}",
        "object_instance_id": inst_id,
    }
    alert = {
        "status": "firing",
        "labels": labels,
        # Measured value carried in the annotations.
        "annotations": {
            "value": 99,
        },
        "startsAt": "2022-06-21T23:47:36.453Z",
        "endsAt": "0001-01-01T00:00:00Z",
        "generatorURL": "http://controller147:9090/graph?g0.expr=up%7B"
                        "job%3D%22node%22%7D+%3D%3D+0&g0.tab=1",
        "fingerprint": "5ef77f1f8a3ecb8d",
    }
    return {
        "receiver": "receiver",
        "status": "firing",
        "alerts": [alert],
        "groupLabels": {},
        "commonLabels": {"alertname": "NodeInstanceDown", "job": "node"},
        "commonAnnotations": {"description": "sample"},
        "externalURL": "http://controller147:9093",
        "version": "4",
        "groupKey": "{}:{}",
        "truncatedAlerts": 0,
    }
def prometheus_auto_scaling_alert(inst_id):
    """Prometheus alert payload that triggers a SCALE_OUT of vdu2_aspect."""
    alert = {
        "status": "firing",
        "labels": {
            "receiver_type": "tacker",
            "function_type": "auto_scale",
            "vnf_instance_id": inst_id,
            "auto_scale_type": "SCALE_OUT",
            "aspect_id": "vdu2_aspect",
        },
        "annotations": {
        },
        "startsAt": "2022-06-21T23:47:36.453Z",
        "endsAt": "0001-01-01T00:00:00Z",
        "generatorURL": "http://controller147:9090/graph?g0.expr="
                        "up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1",
        "fingerprint": "5ef77f1f8a3ecb8d",
    }
    return {
        "receiver": "receiver",
        "status": "firing",
        "alerts": [alert],
        "groupLabels": {},
        "commonLabels": {"alertname": "NodeInstanceDown", "job": "node"},
        "commonAnnotations": {"description": "sample"},
        "externalURL": "http://controller147:9093",
        "version": "4",
        "groupKey": "{}:{}",
        "truncatedAlerts": 0,
    }

View File

@ -0,0 +1,115 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import os
import time
from tacker.objects import fields
from tacker.tests.functional.sol_kubernetes_v2 import base_v2
from tacker.tests.functional.sol_kubernetes_v2 import paramgen
@ddt.ddt
class PrometheusAutoScalingTest(base_v2.BaseVnfLcmKubernetesV2Test):
    """Functional tests for Prometheus-driven CNF auto scaling (v2 API)."""

    @classmethod
    def setUpClass(cls):
        # Upload the CNF test package once for the whole class.
        super(PrometheusAutoScalingTest, cls).setUpClass()
        cur_dir = os.path.dirname(__file__)
        test_instantiate_cnf_resources_path = os.path.join(
            cur_dir, "samples/test_instantiate_cnf_resources")
        cls.vnf_pkg_1, cls.vnfd_id_1 = cls.create_vnf_package(
            test_instantiate_cnf_resources_path)

    @classmethod
    def tearDownClass(cls):
        super(PrometheusAutoScalingTest, cls).tearDownClass()
        cls.delete_vnf_package(cls.vnf_pkg_1)

    def setUp(self):
        super(PrometheusAutoScalingTest, self).setUp()

    def test_prometheus_auto_scaling_basic(self):
        """Basic Prometheus auto scaling scenario.

        * About LCM operations:
          This test includes the following operations.
          - 1. Create a new VNF instance resource
          - 2. Instantiate a VNF instance
          - 3. Prometheus Auto Scaling alert.
          - 4. Terminate a VNF instance
          - 5. Delete a VNF instance
        """
        # 1. LCM-Create: Create a new VNF instance resource
        # NOTE: extensions and vnfConfigurableProperties are omitted
        # because they are commented out in etsi_nfv_sol001.
        create_req = paramgen.instantiate_cnf_resources_create(self.vnfd_id_1)
        resp, body = self.create_vnf_instance(create_req)
        self.assertEqual(201, resp.status_code)
        inst_id = body['id']

        # 2. LCM-Instantiate: Instantiate a VNF instance
        vim_id = self.get_k8s_vim_id()
        instantiate_req = paramgen.min_sample_instantiate(vim_id)
        instantiate_req['additionalParams'][
            'lcm-kubernetes-def-files'] = ['Files/kubernetes/deployment.yaml']
        # Auto scaling only acts on instances with isAutoscaleEnabled set.
        instantiate_req['vnfConfigurableProperties'] = {
            'isAutoscaleEnabled': True}
        resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(200, resp.status_code)

        # 3. Send Auto-Scaling alert
        alert = paramgen.prometheus_auto_scaling_alert(inst_id)
        # CNF scale is not integrated yet. use this value for now.
        alert['alerts'][0]['labels']['aspect_id'] = 'invalid_id'
        resp, body = self.prometheus_auto_scaling_alert(alert)
        self.assertEqual(204, resp.status_code)
        time.sleep(5)

        # 4. LCM-Terminate: Terminate VNF
        terminate_req = paramgen.terminate_vnf_min()
        resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)
        # wait a bit because there is a bit time lag between lcmocc DB
        # update and terminate completion.
        time.sleep(10)
        # check instantiationState of VNF
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(fields.VnfInstanceState.NOT_INSTANTIATED,
                         body['instantiationState'])

        # 5. LCM-Delete: Delete a VNF instance
        resp, body = self.delete_vnf_instance(inst_id)
        self.assertEqual(204, resp.status_code)
        # check deletion of VNF instance
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(404, resp.status_code)

View File

@ -0,0 +1,440 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import os
import time
from tacker.objects import fields
from tacker.tests.functional.sol_kubernetes_v2 import base_v2
from tacker.tests.functional.sol_kubernetes_v2 import paramgen
@ddt.ddt
class VnfFmTest(base_v2.BaseVnfLcmKubernetesV2Test):
@classmethod
def setUpClass(cls):
    """Upload the CNF test package shared by all tests in this class."""
    super(VnfFmTest, cls).setUpClass()
    cur_dir = os.path.dirname(__file__)
    test_instantiate_cnf_resources_path = os.path.join(
        cur_dir, "samples/test_instantiate_cnf_resources")
    cls.vnf_pkg_1, cls.vnfd_id_1 = cls.create_vnf_package(
        test_instantiate_cnf_resources_path)

@classmethod
def tearDownClass(cls):
    """Delete the shared CNF test package."""
    super(VnfFmTest, cls).tearDownClass()
    cls.delete_vnf_package(cls.vnf_pkg_1)

def setUp(self):
    super(VnfFmTest, self).setUp()
def test_faultmanagement_interface_min(self):
    """Test FM operations with only required attributes set

    * About attributes:
      Omit except for required attributes.
      Only the following cardinality attributes are set.
      - 1
      - 1..N (1)

    * About LCM operations:
      This test includes the following operations.
      - 1. Create a new VNF instance resource
      - 2. Instantiate a VNF instance
      - 3. Create a new subscription
      - 4. List subscription with attribute-based
      - 5. Show subscription
      - 6. Alert-Event (firing)
      - 7. FM-List-Alarm
      - 8. FM-Show-Alarm
      - 9. FM-Update-Alarm
      - 10. FM-Show-Alarm
      - 11. Alert-Event (resolved)
      - 12. FM-Show-Alarm
      - 13. FM-Delete-Subscription: Delete subscription
      - 14. Terminate a VNF instance
      - 15. Delete a VNF instance
    """
    # 1. LCM-Create: Create a new VNF instance resource
    # NOTE: extensions and vnfConfigurableProperties are omitted
    # because they are commented out in etsi_nfv_sol001.
    create_req = paramgen.instantiate_cnf_resources_create(self.vnfd_id_1)
    resp, body = self.create_vnf_instance(create_req)
    self.assertEqual(201, resp.status_code)
    inst_id = body['id']

    # 2. LCM-Instantiate: Instantiate a VNF instance
    vim_id = self.get_k8s_vim_id()
    instantiate_req = paramgen.min_sample_instantiate(vim_id)
    instantiate_req['additionalParams'][
        'lcm-kubernetes-def-files'] = ['Files/kubernetes/deployment.yaml']
    resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
    self.assertEqual(202, resp.status_code)
    lcmocc_id = os.path.basename(resp.headers['Location'])
    self.wait_lcmocc_complete(lcmocc_id)
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(200, resp.status_code)
    # Pick a VDU2 Pod name to reference in the alert payloads below.
    vnfc_resource_infos = body['instantiatedVnfInfo'].get(
        'vnfcResourceInfo')
    pod_name = [vnfc_info['computeResource']['resourceId']
                for vnfc_info in vnfc_resource_infos
                if vnfc_info['vduId'] == 'VDU2'][0]

    # 3. FM-Create-Subscription: Create a new subscription
    expected_inst_attrs = ['id', 'callbackUri', '_links']
    callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                self._testMethodName)
    callback_uri = ('http://localhost:'
                    f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                    f'{callback_url}')
    sub_req = paramgen.sub_create_min(callback_uri)
    resp, body = self.create_subscription(sub_req)
    self.assertEqual(201, resp.status_code)
    self.check_resp_headers_in_create(resp)
    sub_id = body['id']
    self.check_resp_body(body, expected_inst_attrs)
    # Test notification
    self.assert_notification_get(callback_url)
    self.addCleanup(self.delete_subscription, sub_id)

    # 4. FM-List-Subscription: List subscription with attribute-based
    # filtering
    expected_attrs = ['id', 'callbackUri', '_links']
    resp, body = self.list_subscriptions()
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    for sbsc in body:
        self.check_resp_body(sbsc, expected_attrs)

    # 5. FM-Show-Subscription: Show subscription
    resp, body = self.show_subscription(sub_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, expected_attrs)

    # 6. Alert-Event (firing)
    alert = paramgen.alert_event_firing(inst_id, pod_name)
    resp, body = self.create_fm_alarm(alert)
    self.assertEqual(204, resp.status_code)
    time.sleep(5)
    self._check_notification(callback_url, 'AlarmNotification')

    # 7. FM-List-Alarm
    alarm_expected_attrs = [
        'id',
        'managedObjectId',
        'alarmRaisedTime',
        'ackState',
        'perceivedSeverity',
        'eventTime',
        'eventType',
        'probableCause',
        'isRootCause',
        '_links'
    ]
    resp, body = self.list_fm_alarm()
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    for sbsc in body:
        self.check_resp_body(sbsc, alarm_expected_attrs)

    # 8. FM-Show-Alarm
    # Find this instance's alarm via a managedObjectId filter first.
    filter_expr = {'filter': f'(eq,managedObjectId,{inst_id})'}
    resp, body = self.list_fm_alarm(filter_expr)
    alarm_id = body[0]['id']
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, alarm_expected_attrs)

    # 9. FM-Update-Alarm
    expected_attrs = [
        'ackState'
    ]
    update_req = paramgen.update_alarm()
    resp, body = self.update_fm_alarm(alarm_id, update_req)
    self.assertEqual(200, resp.status_code)
    # NOTE(review): the DELETE header check is reused for this PATCH
    # response — presumably the expected header set matches; confirm.
    self.check_resp_headers_in_delete(resp)
    self.check_resp_body(body, expected_attrs)

    # 10. FM-Show-Alarm
    expected_attrs = [
        'id',
        'managedObjectId',
        'alarmRaisedTime',
        'ackState',
        'perceivedSeverity',
        'eventTime',
        'eventType',
        'probableCause',
        'isRootCause',
        '_links'
    ]
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, expected_attrs)

    # 11. Alert-Event (resolved)
    alert = paramgen.alert_event_resolved(inst_id, pod_name)
    resp, body = self.create_fm_alarm(alert)
    self.assertEqual(204, resp.status_code)
    time.sleep(5)
    self._check_notification(callback_url, 'AlarmClearedNotification')

    # 12. FM-Show-Alarm
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, alarm_expected_attrs)

    # 13. FM-Delete-Subscription: Delete subscription
    resp, body = self.delete_subscription(sub_id)
    self.assertEqual(204, resp.status_code)
    self.check_resp_headers_in_delete(resp)

    # 14. LCM-Terminate: Terminate VNF
    terminate_req = paramgen.terminate_vnf_min()
    resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
    self.assertEqual(202, resp.status_code)
    lcmocc_id = os.path.basename(resp.headers['Location'])
    self.wait_lcmocc_complete(lcmocc_id)
    # wait a bit because there is a bit time lag between lcmocc DB
    # update and terminate completion.
    time.sleep(10)
    # check instantiationState of VNF
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(200, resp.status_code)
    self.assertEqual(fields.VnfInstanceState.NOT_INSTANTIATED,
                     body['instantiationState'])

    # 15. LCM-Delete: Delete a VNF instance
    resp, body = self.delete_vnf_instance(inst_id)
    self.assertEqual(204, resp.status_code)
    # check deletion of VNF instance
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(404, resp.status_code)
def test_faultmanagement_interface_max(self):
    """Test FM operations with all attributes set

    * About attributes:
      All of the following cardinality attributes are set.
      In addition, 0..N or 1..N attributes are set to 2 or more.
      - 0..1 (1)
      - 0..N (2 or more)
      - 1
      - 1..N (2 or more)

    * About LCM operations:
      This test includes the following operations.
      - 1. Create a new VNF instance resource
      - 2. Instantiate a VNF instance
      - 3. Create a new subscription
      - 4. List subscription with attribute-based filtering
      - 5. Show subscription
      - 6. Alert-Event (firing)
      - 7. FM-List-Alarm
      - 8. FM-Show-Alarm
      - 9. FM-Update-Alarm
      - 10. FM-Show-Alarm
      - 11. Alert-Event (resolved)
      - 12. FM-Show-Alarm
      - 13. FM-Delete-Subscription: Delete subscription
      - 14. Terminate a VNF instance
      - 15. Delete a VNF instance
    """
    # 1. LCM-Create: Create a new VNF instance resource
    # NOTE: extensions and vnfConfigurableProperties are omitted
    # because they are commented out in etsi_nfv_sol001.
    create_req = paramgen.instantiate_cnf_resources_create(self.vnfd_id_1)
    resp, body = self.create_vnf_instance(create_req)
    self.assertEqual(201, resp.status_code)
    inst_id = body['id']

    # 2. LCM-Instantiate: Instantiate a VNF instance
    vim_id = self.get_k8s_vim_id()
    instantiate_req = paramgen.min_sample_instantiate(vim_id)
    instantiate_req['additionalParams'][
        'lcm-kubernetes-def-files'] = ['Files/kubernetes/deployment.yaml']
    resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
    self.assertEqual(202, resp.status_code)
    lcmocc_id = os.path.basename(resp.headers['Location'])
    self.wait_lcmocc_complete(lcmocc_id)

    # The fake alert below must reference a real pod name, so pick the
    # resourceId of a VDU2 vnfcResourceInfo from the instantiated VNF.
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(200, resp.status_code)
    vnfc_resource_infos = body['instantiatedVnfInfo'].get(
        'vnfcResourceInfo')
    pod_name = [vnfc_info['computeResource']['resourceId']
                for vnfc_info in vnfc_resource_infos
                if vnfc_info['vduId'] == 'VDU2'][0]

    # 3. FM-Create-Subscription: Create a new subscription
    expected_inst_attrs = ['id', 'callbackUri', '_links', 'filter']
    callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                self._testMethodName)
    callback_uri = ('http://localhost:'
                    f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                    f'{callback_url}')
    sub_req = paramgen.sub_create_max(
        callback_uri, self.vnfd_id_1, inst_id)
    resp, body = self.create_subscription(sub_req)
    self.assertEqual(201, resp.status_code)
    self.check_resp_headers_in_create(resp)
    sub_id = body['id']
    self.check_resp_body(body, expected_inst_attrs)
    # Test notification (the subscription test GET on the callback URI)
    self.assert_notification_get(callback_url)

    # 4. FM-List-Subscription: List subscription with attribute-based
    # filtering
    expected_attrs = ['id', 'callbackUri', '_links', 'filter']
    filter_expr = {
        'filter': f'(eq,id,{sub_id})'
    }
    resp, body = self.list_subscriptions(filter_expr)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    for sbsc in body:
        self.check_resp_body(sbsc, expected_attrs)

    # 5. FM-Show-Subscription: Show subscription
    resp, body = self.show_subscription(sub_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, expected_attrs)

    # 6. Alert-Event (firing): simulate a Prometheus alert, which
    # creates an alarm and triggers an AlarmNotification.
    alert = paramgen.alert_event_firing(inst_id, pod_name)
    resp, body = self.create_fm_alarm(alert)
    self.assertEqual(204, resp.status_code)
    # wait for the alert to be processed and the notification sent
    time.sleep(5)
    self._check_notification(callback_url, 'AlarmNotification')

    # 7. FM-List-Alarm
    alarm_expected_attrs = [
        'id',
        'managedObjectId',
        'alarmRaisedTime',
        'ackState',
        'perceivedSeverity',
        'eventTime',
        'eventType',
        'probableCause',
        'isRootCause',
        '_links'
    ]
    filter_expr = {'filter': f'(eq,managedObjectId,{inst_id})'}
    resp, body = self.list_fm_alarm(filter_expr)
    self.assertEqual(200, resp.status_code)
    alarm_id = body[0]['id']
    self.check_resp_headers_in_get(resp)
    for sbsc in body:
        self.check_resp_body(sbsc, alarm_expected_attrs)

    # 8. FM-Show-Alarm
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, alarm_expected_attrs)

    # 9. FM-Update-Alarm: acknowledge the alarm
    expected_attrs = [
        'ackState'
    ]
    update_req = paramgen.update_alarm()
    resp, body = self.update_fm_alarm(alarm_id, update_req)
    self.assertEqual(200, resp.status_code)
    # NOTE(review): delete-style header check reused for the PATCH
    # response (no Location header expected) -- confirm intent.
    self.check_resp_headers_in_delete(resp)
    self.check_resp_body(body, expected_attrs)

    # 10. FM-Show-Alarm
    expected_attrs = [
        'id',
        'managedObjectId',
        'alarmRaisedTime',
        'ackState',
        'perceivedSeverity',
        'eventTime',
        'eventType',
        'probableCause',
        'isRootCause',
        '_links'
    ]
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, expected_attrs)

    # 11. Alert-Event (resolved): the alarm is cleared and an
    # AlarmClearedNotification is sent.
    alert = paramgen.alert_event_resolved(inst_id, pod_name)
    resp, body = self.create_fm_alarm(alert)
    self.assertEqual(204, resp.status_code)
    # wait for the alert to be processed and the notification sent
    time.sleep(5)
    self._check_notification(callback_url, 'AlarmClearedNotification')

    # 12. FM-Show-Alarm
    resp, body = self.show_fm_alarm(alarm_id)
    self.assertEqual(200, resp.status_code)
    self.check_resp_headers_in_get(resp)
    self.check_resp_body(body, alarm_expected_attrs)

    # 13. FM-Delete-Subscription: Delete subscription
    resp, body = self.delete_subscription(sub_id)
    self.assertEqual(204, resp.status_code)
    self.check_resp_headers_in_delete(resp)

    # 14. LCM-Terminate: Terminate VNF
    terminate_req = paramgen.terminate_vnf_min()
    resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
    self.assertEqual(202, resp.status_code)
    lcmocc_id = os.path.basename(resp.headers['Location'])
    self.wait_lcmocc_complete(lcmocc_id)

    # wait a bit because there is a bit time lag between lcmocc DB
    # update and terminate completion.
    time.sleep(10)
    # check instantiationState of VNF
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(200, resp.status_code)
    self.assertEqual(fields.VnfInstanceState.NOT_INSTANTIATED,
                     body['instantiationState'])

    # 15. LCM-Delete: Delete a VNF instance
    resp, body = self.delete_vnf_instance(inst_id)
    self.assertEqual(204, resp.status_code)
    # check deletion of VNF instance
    resp, body = self.show_vnf_instance(inst_id)
    self.assertEqual(404, resp.status_code)

View File

@ -0,0 +1,345 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import os
import time
from tacker.objects import fields
from tacker.tests.functional.sol_kubernetes_v2 import base_v2
from tacker.tests.functional.sol_kubernetes_v2 import paramgen
@ddt.ddt
class VnfPmTest(base_v2.BaseVnfLcmKubernetesV2Test):
    """Functional tests for the v2 Performance Management interface."""

    @classmethod
    def setUpClass(cls):
        # Upload the sample CNF package shared by all test cases.
        super(VnfPmTest, cls).setUpClass()
        cur_dir = os.path.dirname(__file__)
        test_instantiate_cnf_resources_path = os.path.join(
            cur_dir, "samples/test_instantiate_cnf_resources")
        cls.vnf_pkg_1, cls.vnfd_id_1 = cls.create_vnf_package(
            test_instantiate_cnf_resources_path)

    @classmethod
    def tearDownClass(cls):
        super(VnfPmTest, cls).tearDownClass()
        cls.delete_vnf_package(cls.vnf_pkg_1)

    def setUp(self):
        super(VnfPmTest, self).setUp()
        # Fake the Prometheus "/-/reload" endpoint that is hit when a PM
        # job rewrites the Prometheus configuration.
        base_v2.FAKE_SERVER_MANAGER.set_callback(
            'PUT', "/-/reload", status_code=202,
            response_headers={"Content-Type": "text/plain"})

    def test_performancemanagement_interface_min(self):
        """Test PM operations with minimal attributes set

        * About attributes:
          Only required (cardinality 1) attributes are set; optional
          attributes are omitted (request built by paramgen.pm_job_min).

        * About LCM operations:
          This test includes the following operations.
          - 1. Create a new VNF instance resource
          - 2. Instantiate a VNF instance
          - 3. PMJob-Create
          - 4. PMJob-Update
          - 5. PM-Event
          - 6. PMJob-List
          - 7. PMJob-Show
          - 8. PMJob-Report-Show
          - 9. PMJob-Delete
          - 10. Terminate a VNF instance
          - 11. Delete a VNF instance
        """
        # 1. LCM-Create: Create a new VNF instance resource
        # NOTE: extensions and vnfConfigurableProperties are omitted
        # because they are commented out in etsi_nfv_sol001.
        create_req = paramgen.pm_instantiate_cnf_resources_create(
            self.vnfd_id_1)
        resp, body = self.create_vnf_instance(create_req)
        self.assertEqual(201, resp.status_code)
        inst_id = body['id']

        # 2. LCM-Instantiate: Instantiate a VNF instance
        vim_id = self.get_k8s_vim_id()
        instantiate_req = paramgen.min_sample_instantiate(vim_id)
        instantiate_req['additionalParams'][
            'lcm-kubernetes-def-files'] = ['Files/kubernetes/deployment.yaml']
        resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)

        # 3. PMJob-Create
        pm_expected_attrs = [
            'id',
            'objectType',
            'objectInstanceIds',
            'criteria',
            'callbackUri',
            '_links'
        ]
        callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                    self._testMethodName)
        callback_uri = ('http://localhost:'
                        f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                        f'{callback_url}')
        sub_req = paramgen.pm_job_min(
            callback_uri, inst_id, self.fake_prometheus_ip)
        resp, body = self.create_pm_job(sub_req)
        self.assertEqual(201, resp.status_code)
        self.check_resp_headers_in_create(resp)
        self.check_resp_body(body, pm_expected_attrs)
        # Test notification (the test GET on the callback URI)
        self.assert_notification_get(callback_url)
        pm_job_id = body.get('id')

        # 4. PMJob-Update: switch the job to a second callback URI
        callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                    self._testMethodName)
        callback_url = callback_url + '_1'
        callback_uri = ('http://localhost:'
                        f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                        f'{callback_url}')
        base_v2.FAKE_SERVER_MANAGER.set_callback(
            'GET', callback_url, status_code=204)
        base_v2.FAKE_SERVER_MANAGER.set_callback(
            'POST', callback_url, status_code=204)
        update_req = paramgen.update_pm_job(callback_uri)
        resp, body = self.update_pm_job(pm_job_id, update_req)
        self.assertEqual(200, resp.status_code)
        # NOTE(review): delete-style header check reused for the PATCH
        # response (no Location header expected) -- confirm intent.
        self.check_resp_headers_in_delete(resp)
        # Test notification
        self.assert_notification_get(callback_url)

        # 5. PM-Event: simulate a Prometheus PM event for the job
        sub_req = paramgen.pm_event(pm_job_id, inst_id)
        resp, body = self.create_pm_event(sub_req)
        self.assertEqual(204, resp.status_code)
        # wait for the event to be processed and the notification sent
        time.sleep(5)
        self._check_notification(
            callback_url, 'PerformanceInformationAvailableNotification')

        # 6. PMJob-List
        resp, body = self.list_pm_job()
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        for sbsc in body:
            self.check_resp_body(sbsc, pm_expected_attrs)

        # 7. PMJob-Show
        resp, body = self.show_pm_job(pm_job_id)
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        self.check_resp_body(body, pm_expected_attrs)
        # The report created by the PM event is referenced by href;
        # extract its id for the report-show step.
        reports = body['reports']
        href = reports[0]['href']
        report_id = href.split('/')[-1]

        # 8. PMJob-Report-Show
        expected_attrs = ['entries']
        resp, body = self.show_pm_job_report(pm_job_id, report_id)
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        self.check_resp_body(body, expected_attrs)

        # 9. PMJob-Delete
        resp, body = self.delete_pm_job(pm_job_id)
        self.assertEqual(204, resp.status_code)
        self.check_resp_headers_in_delete(resp)

        # 10. LCM-Terminate: Terminate VNF
        terminate_req = paramgen.terminate_vnf_min()
        resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)

        # wait a bit because there is a bit time lag between lcmocc DB
        # update and terminate completion.
        time.sleep(10)
        # check instantiationState of VNF
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(fields.VnfInstanceState.NOT_INSTANTIATED,
                         body['instantiationState'])

        # 11. LCM-Delete: Delete a VNF instance
        resp, body = self.delete_vnf_instance(inst_id)
        self.assertEqual(204, resp.status_code)
        # check deletion of VNF instance
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(404, resp.status_code)

    def test_performancemanagement_interface_max(self):
        """Test PM operations with all attributes set

        * About attributes:
          All of the following cardinality attributes are set.
          In addition, 0..N or 1..N attributes are set to 2 or more.
          - 0..1 (1)
          - 0..N (2 or more)
          - 1
          - 1..N (2 or more)

        * About LCM operations:
          This test includes the following operations.
          - 1. Create a new VNF instance resource
          - 2. Instantiate a VNF instance
          - 3. PMJob-Create
          - 4. PMJob-Update
          - 5. PM-Event
          - 6. PMJob-List
          - 7. PMJob-Show
          - 8. PMJob-Report-Show
          - 9. PMJob-Delete
          - 10. Terminate a VNF instance
          - 11. Delete a VNF instance
        """
        # 1. LCM-Create: Create a new VNF instance resource
        # NOTE: extensions and vnfConfigurableProperties are omitted
        # because they are commented out in etsi_nfv_sol001.
        create_req = paramgen.instantiate_cnf_resources_create(self.vnfd_id_1)
        resp, body = self.create_vnf_instance(create_req)
        self.assertEqual(201, resp.status_code)
        inst_id = body['id']

        # 2. LCM-Instantiate: Instantiate a VNF instance
        vim_id = self.get_k8s_vim_id()
        instantiate_req = paramgen.min_sample_instantiate(vim_id)
        instantiate_req['additionalParams'][
            'lcm-kubernetes-def-files'] = ['Files/kubernetes/deployment.yaml']
        resp, body = self.instantiate_vnf_instance(inst_id, instantiate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)

        # 3. PMJob-Create
        pm_expected_attrs = [
            'id',
            'objectType',
            'objectInstanceIds',
            'criteria',
            'callbackUri',
            '_links'
        ]
        callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                    self._testMethodName)
        callback_uri = ('http://localhost:'
                        f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                        f'{callback_url}')
        sub_req = paramgen.pm_job_max(
            callback_uri, inst_id, self.fake_prometheus_ip)
        resp, body = self.create_pm_job(sub_req)
        self.assertEqual(201, resp.status_code)
        self.check_resp_headers_in_create(resp)
        self.check_resp_body(body, pm_expected_attrs)
        # Test notification (the test GET on the callback URI)
        self.assert_notification_get(callback_url)
        pm_job_id = body.get('id')

        # 4. PMJob-Update: switch the job to a second callback URI
        callback_url = os.path.join(base_v2.MOCK_NOTIFY_CALLBACK_URL,
                                    self._testMethodName)
        callback_url = callback_url + '_1'
        callback_uri = ('http://localhost:'
                        f'{base_v2.FAKE_SERVER_MANAGER.SERVER_PORT}'
                        f'{callback_url}')
        base_v2.FAKE_SERVER_MANAGER.set_callback(
            'GET', callback_url, status_code=204)
        base_v2.FAKE_SERVER_MANAGER.set_callback(
            'POST', callback_url, status_code=204)
        update_req = paramgen.update_pm_job(callback_uri)
        resp, body = self.update_pm_job(pm_job_id, update_req)
        self.assertEqual(200, resp.status_code)
        # NOTE(review): delete-style header check reused for the PATCH
        # response (no Location header expected) -- confirm intent.
        self.check_resp_headers_in_delete(resp)
        # Test notification
        self.assert_notification_get(callback_url)

        # 5. PM-Event: simulate a Prometheus PM event for the job
        sub_req = paramgen.pm_event(pm_job_id, inst_id)
        resp, body = self.create_pm_event(sub_req)
        self.assertEqual(204, resp.status_code)
        # wait for the event to be processed and the notification sent
        time.sleep(5)
        self._check_notification(
            callback_url, 'PerformanceInformationAvailableNotification')

        # 6. PMJob-List with attribute-based filtering
        filter_expr = {'filter': '(eq,objectType,VirtualCompute)'}
        resp, body = self.list_pm_job(filter_expr)
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        for sbsc in body:
            self.check_resp_body(sbsc, pm_expected_attrs)

        # 7. PMJob-Show
        resp, body = self.show_pm_job(pm_job_id)
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        self.check_resp_body(body, pm_expected_attrs)
        # The report created by the PM event is referenced by href;
        # extract its id for the report-show step.
        reports = body['reports']
        href = reports[0]['href']
        report_id = href.split('/')[-1]

        # 8. PMJob-Report-Show
        expected_attrs = ['entries']
        resp, body = self.show_pm_job_report(pm_job_id, report_id)
        self.assertEqual(200, resp.status_code)
        self.check_resp_headers_in_get(resp)
        self.check_resp_body(body, expected_attrs)

        # 9. PMJob-Delete
        resp, body = self.delete_pm_job(pm_job_id)
        self.assertEqual(204, resp.status_code)
        self.check_resp_headers_in_delete(resp)

        # 10. LCM-Terminate: Terminate VNF
        terminate_req = paramgen.terminate_vnf_min()
        resp, body = self.terminate_vnf_instance(inst_id, terminate_req)
        self.assertEqual(202, resp.status_code)
        lcmocc_id = os.path.basename(resp.headers['Location'])
        self.wait_lcmocc_complete(lcmocc_id)

        # wait a bit because there is a bit time lag between lcmocc DB
        # update and terminate completion.
        time.sleep(10)
        # check instantiationState of VNF
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(fields.VnfInstanceState.NOT_INSTANTIATED,
                         body['instantiationState'])

        # 11. LCM-Delete: Delete a VNF instance
        resp, body = self.delete_vnf_instance(inst_id)
        self.assertEqual(204, resp.status_code)
        # check deletion of VNF instance
        resp, body = self.show_vnf_instance(inst_id)
        self.assertEqual(404, resp.status_code)

View File

@ -25,8 +25,9 @@ class TestAPIVersion(base.BaseTestCase):
self.assertTrue(vers.is_null())
def test_init(self):
supported_versions = ["3.1.4159", "2.0.0"]
supported_versions = ["3.1.4159", "2.0.0", "2.1.0"]
for vers, vers_str in [("2.0.0", "2.0.0"),
("2.1.0", "2.1.0"),
("3.1.4159", "3.1.4159"),
("2.0.0-impl:foobar", "2.0.0")]:
v = api_version.APIVersion(vers, supported_versions)
@ -34,6 +35,14 @@ class TestAPIVersion(base.BaseTestCase):
def test_init_exceptions(self):
supported_versions = ["2.0.0"]
self.assertRaises(sol_ex.APIVersionMissing,
api_version.APIVersion, None, supported_versions)
self.assertRaises(sol_ex.InvalidAPIVersionString,
api_version.APIVersion,
"2.0.0-abc:foobar",
["2.0.0"])
self.assertRaises(sol_ex.InvalidAPIVersionString,
api_version.APIVersion, "0.1.2", supported_versions)
@ -50,11 +59,19 @@ class TestAPIVersion(base.BaseTestCase):
def test_matches(self):
supported_versions = ["1.3.0", "1.3.1", "2.0.0"]
vers = api_version.APIVersion("2.0.0")
vers = api_version.APIVersion("1.3.1")
self.assertTrue(
vers.matches(api_version.APIVersion(), api_version.APIVersion())
)
self.assertTrue(
vers.matches(api_version.APIVersion("1.3.0", supported_versions),
api_version.APIVersion()))
self.assertFalse(
self.assertTrue(
vers.matches(api_version.APIVersion(),
api_version.APIVersion("1.3.1", supported_versions)))
api_version.APIVersion("2.0.0", supported_versions)))
self.assertTrue(
vers.matches(api_version.APIVersion("1.3.0", supported_versions),
api_version.APIVersion("2.0.0", supported_versions)))

View File

@ -0,0 +1,88 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker import context
from tacker.sol_refactored.api import prometheus_plugin_wsgi as pp_wsgi
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored import objects
from tacker.tests.unit import base
from unittest import mock
class TestPrometheusPlugin(base.TestCase):
    """Unit tests for the Prometheus plugin WSGI response/resource classes."""

    def setUp(self):
        super(TestPrometheusPlugin, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context

    @mock.patch.object(pp_wsgi.PrometheusPluginErrorResponse, 'serialize')
    def test_response(self, mock_serialize_pp):
        # _Test is a multi-purpose stand-in: it carries enough attributes
        # to act as a WSGI request, an exception-like error source and a
        # serializable response object at the same time.
        class _Test():
            def __init__(self, ctx, title):
                self.status = 200
                self.detail = 'detail'
                self.title = title
                self.method = 'GET'
                self.url = 'url'
                self.environ = None
                self.body = {}
                self.context = ctx
                self.status_int = 200

            def best_match_content_type(self):
                return 'application/json'

            def serialize(self, accept):
                # When constructed with title='error', serializing raises
                # so that the resource's error path is exercised.
                if self.title == 'error':
                    raise sol_ex.SolValidationError(
                        detail='test error')
                return self

        def test(*args, **kwargs):
            return (None, None, None)

        def test2(*args, **kwargs):
            return _Test(None, None)

        def test3(*args, **kwargs):
            return _Test(None, 'error')

        # make responses: cover both constructor paths (with and
        # without a title on the error source).
        pp_wsgi.PrometheusPluginResponse(
            200, {}, content_type='content_type')
        pp_wsgi.PrometheusPluginErrorResponse(
            _Test(self.context, None), None)
        pp_wsgi.PrometheusPluginErrorResponse(
            _Test(self.context, 'title'), None)

        # no error: dispatch a fake request through the resource
        p = pp_wsgi.PrometheusPluginResource(
            None, 'tacker_prometheus_plugin_api:prometheus_plugin:alert')
        p(_Test(self.context, None))

        # raise unknown error: stub out the request pipeline so that
        # serialization of the dispatched response fails.
        p = pp_wsgi.PrometheusPluginResource(
            None, 'tacker_prometheus_plugin_api:prometheus_plugin:alert')
        p._deserialize_request = test
        p._check_policy = test
        p._dispatch = test2
        p(_Test(self.context, None))
        # NOTE(review): side_effect is set to a _Test instance, which is
        # neither callable, an exception, nor iterable -- confirm the
        # intended mock behavior here.
        mock_serialize_pp.side_effect = _Test(self.context, 'error')
        p._dispatch = test3
        p(_Test(self.context, 'error'))

View File

@ -0,0 +1,102 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from tacker import context
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored import objects
from tacker.tests import base
from tacker.tests.unit.sol_refactored.samples import fakes_for_fm
class TestFmAlarmUtils(base.BaseTestCase):
    """Unit tests for tacker.sol_refactored.common.fm_alarm_utils.

    Fix: the original tests passed the imported ``tacker.context``
    *module* instead of the request-context instance to the utils
    functions; that only worked because DB access is mocked.  They now
    pass ``self.context``.
    """

    def setUp(self):
        super(TestFmAlarmUtils, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_get_alarm(self, mock_alarm):
        """get_alarm returns the stored alarm, or raises when missing."""
        mock_alarm.return_value = objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)
        result = alarm_utils.get_alarm(
            self.context, fakes_for_fm.alarm_example['id'])
        self.assertEqual(fakes_for_fm.alarm_example['id'], result.id)

        # A missing record raises AlarmNotFound.
        mock_alarm.return_value = None
        self.assertRaises(
            sol_ex.AlarmNotFound,
            alarm_utils.get_alarm, self.context,
            fakes_for_fm.alarm_example['id'])

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
    def test_get_alarms_all(self, mock_alarms):
        """get_alarms_all returns every stored alarm object."""
        mock_alarms.return_value = [objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)]
        result = alarm_utils.get_alarms_all(self.context)
        self.assertEqual(fakes_for_fm.alarm_example['id'], result[0].id)

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_filter')
    def test_get_not_cleared_alarms(self, mock_alarms):
        """get_not_cleared_alarms filters by managedObjectId."""
        mock_alarms.return_value = [objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)]
        result = alarm_utils.get_not_cleared_alarms(
            self.context, fakes_for_fm.alarm_example['managedObjectId'])
        self.assertEqual(fakes_for_fm.alarm_example['id'], result[0].id)

    def test_make_alarm_links(self):
        """Links point at the v1 alarm and the v2 VNF instance."""
        alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
        endpoint = 'http://127.0.0.1:9890'
        expected_result = objects.AlarmV1_Links()
        expected_result.self = objects.Link(
            href=f'{endpoint}/vnffm/v1/alarms/{alarm.id}')
        expected_result.objectInstance = objects.Link(
            href=f'{endpoint}/vnflcm/v2/vnf_instances/{alarm.managedObjectId}')
        result = alarm_utils.make_alarm_links(alarm, endpoint)
        self.assertEqual(expected_result.self.href, result.self.href)
        self.assertEqual(expected_result.objectInstance.href,
                         result.objectInstance.href)

    def test_make_alarm_notif_data(self):
        """The notification type depends on alarmClearedTime presence."""
        subsc = objects.FmSubscriptionV1.from_dict(
            fakes_for_fm.fm_subsc_example)
        alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
        endpoint = 'http://127.0.0.1:9890'

        # The sample alarm has alarmClearedTime set -> "cleared" variant.
        alarm_cleared_result = alarm_utils.make_alarm_notif_data(
            subsc, alarm, endpoint)

        # Without alarmClearedTime a plain AlarmNotification is built.
        alarm_clear = copy.deepcopy(fakes_for_fm.alarm_example)
        del alarm_clear['alarmClearedTime']
        alarm = objects.AlarmV1.from_dict(alarm_clear)
        alarm_result = alarm_utils.make_alarm_notif_data(
            subsc, alarm, endpoint)

        self.assertEqual('AlarmClearedNotificationV1',
                         type(alarm_cleared_result).__name__)
        self.assertEqual('AlarmClearedNotification',
                         alarm_cleared_result.notificationType)
        self.assertEqual('AlarmNotificationV1', type(alarm_result).__name__)
        self.assertEqual('AlarmNotification',
                         alarm_result.notificationType)
        self.assertEqual(alarm_clear, alarm_result.alarm.to_dict())

View File

@ -0,0 +1,266 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import requests
from unittest import mock
from oslo_log import log as logging
from tacker import context
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored.common import fm_subscription_utils as subsc_utils
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored import objects
from tacker.tests import base
from tacker.tests.unit.sol_refactored.samples import fakes_for_fm
LOG = logging.getLogger(__name__)
class TestFmSubscriptionUtils(base.BaseTestCase):
def setUp(self):
    # Register versioned object classes and build an admin request
    # context shared by the test cases.
    super(TestFmSubscriptionUtils, self).setUp()
    objects.register_all()
    self.context = context.get_admin_context()
@mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
def test_get_subsc(self, mock_subsc):
    """get_subsc returns the subscription, or raises when missing.

    Fix: pass the request-context instance (``self.context``) instead
    of the imported ``tacker.context`` module, which only worked
    because DB access is mocked.
    """
    mock_subsc.return_value = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    result = subsc_utils.get_subsc(
        self.context, fakes_for_fm.fm_subsc_example['id'])
    self.assertEqual(fakes_for_fm.fm_subsc_example['id'], result.id)

    # A missing record raises FmSubscriptionNotFound.
    mock_subsc.return_value = None
    self.assertRaises(
        sol_ex.FmSubscriptionNotFound,
        subsc_utils.get_subsc, self.context, 'subsc-1')
@mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
def test_get_subsc_all(self, mock_subsc):
    """get_subsc_all returns every stored subscription.

    Fix: pass ``self.context`` (the context instance) rather than the
    imported ``tacker.context`` module.
    """
    mock_subsc.return_value = [objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)]
    result = subsc_utils.get_subsc_all(self.context)
    self.assertEqual(fakes_for_fm.fm_subsc_example['id'], result[0].id)
@mock.patch.object(http_client.HttpClient, 'do_request')
def test_send_notification(self, mock_resp):
    """send_notification works for no-auth, basic-auth and oauth2."""
    subsc_no_auth = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
    notif_data_no_auth = alarm_utils.make_alarm_notif_data(
        subsc_no_auth, alarm, 'http://127.0.0.1:9890')
    # 204 is the status of a successfully delivered notification.
    resp_no_auth = requests.Response()
    resp_no_auth.status_code = 204
    mock_resp.return_value = (resp_no_auth, None)
    # execute no_auth
    subsc_utils.send_notification(subsc_no_auth, notif_data_no_auth)

    subsc_basic_auth = copy.deepcopy(subsc_no_auth)
    subsc_basic_auth.authentication = objects.SubscriptionAuthentication(
        paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
            userName='test', password='test'))
    # execute basic_auth
    subsc_utils.send_notification(subsc_basic_auth, notif_data_no_auth)

    subsc_oauth2 = copy.deepcopy(subsc_no_auth)
    subsc_oauth2.authentication = objects.SubscriptionAuthentication(
        paramsOauth2ClientCredentials=(
            objects.SubscriptionAuthentication_ParamsOauth2(
                clientId='test', clientPassword='test',
                tokenEndpoint='http://127.0.0.1/token')))
    # execute oauth2
    subsc_utils.send_notification(subsc_oauth2, notif_data_no_auth)

    # One HTTP request per subscription flavor.
    self.assertEqual(3, mock_resp.call_count)
@mock.patch.object(http_client.HttpClient, 'do_request')
def test_send_notification_error_code(self, mock_resp):
    """A non-204 response must not raise (logged as an error instead)."""
    subsc_no_auth = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
    notif_data_no_auth = alarm_utils.make_alarm_notif_data(
        subsc_no_auth, alarm, 'http://127.0.0.1:9890')
    # 200 (not 204) is treated as a delivery failure by the utils.
    resp_no_auth = requests.Response()
    resp_no_auth.status_code = 200
    mock_resp.return_value = (resp_no_auth, None)
    # execute no_auth
    subsc_utils.send_notification(subsc_no_auth, notif_data_no_auth)
    # NOTE(review): assertLogs used as a bare statement only builds a
    # context manager and asserts nothing.  To really verify the log it
    # must wrap the call (with self.assertLogs(...):) using the logger
    # name of the production module -- confirm and fix separately.
    self.assertLogs(LOG, 'ERROR')
def test_send_notification_error(self):
    """With no mocked HTTP client the send fails and must not raise."""
    subsc_no_auth = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
    notif_data_no_auth = alarm_utils.make_alarm_notif_data(
        subsc_no_auth, alarm, 'http://127.0.0.1:9890')
    # execute no_auth
    subsc_utils.send_notification(subsc_no_auth, notif_data_no_auth)
    # NOTE(review): bare assertLogs asserts nothing, and 'EXCEPTION' is
    # not a standard logging level -- this check is a no-op; confirm
    # and fix separately.
    self.assertLogs(LOG, 'EXCEPTION')
@mock.patch.object(http_client.HttpClient, 'do_request')
def test_test_notification(self, mock_resp):
    """test_notification succeeds when the endpoint answers 204."""
    fake_resp = requests.Response()
    fake_resp.status_code = 204
    mock_resp.return_value = (fake_resp, None)
    subscription = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    # Must complete without raising for a no-auth subscription.
    subsc_utils.test_notification(subscription)
@mock.patch.object(http_client.HttpClient, 'do_request')
def test_test_notification_error_code(self, mock_resp):
    """A non-204 response makes test_notification raise."""
    subsc_no_auth = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    # 200 (not 204) is treated as a failed connectivity test.
    resp_no_auth = requests.Response()
    resp_no_auth.status_code = 200
    mock_resp.return_value = (resp_no_auth, None)
    # execute no_auth
    self.assertRaises(sol_ex.TestNotificationFailed,
                      subsc_utils.test_notification, subsc_no_auth)
class mock_session():
    # Stand-in session whose request() always returns a non-JSON 400
    # response.  NOTE(review): ``request`` deliberately has no ``self``
    # parameter -- the class object itself is handed over as the
    # session (see test_test_notification_error), so the first
    # positional argument binds to ``url``.  Confirm this is intended.
    def request(url, method, raise_exc=False, **kwargs):
        resp = requests.Response()
        resp.status_code = 400
        resp.headers['Content-Type'] = 'application/zip'
        return resp
@mock.patch.object(http_client.HttpClient, '_decode_body')
@mock.patch.object(http_client.NoAuthHandle, 'get_session')
def test_test_notification_error(self, mock_session, mock_decode_body):
    """A 400 response from the session makes test_notification raise."""
    subsc_no_auth = objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)
    # The mock_session class attribute acts as the session object
    # returned by get_session.
    mock_session.return_value = self.mock_session
    mock_decode_body.return_value = None
    self.assertRaises(sol_ex.TestNotificationFailed,
                      subsc_utils.test_notification, subsc_no_auth)
@mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
def test_get_matched_subscs(self, mock_subscs):
    """Each FM filter attribute must match/mismatch independently.

    Builds one matching and one mismatching subscription per filter
    attribute and checks that only the matching ones are returned.
    """
    inst = objects.VnfInstanceV2(id='test-instance', vnfProvider='company')
    notif_type = 'AlarmClearedNotification'
    new_alarm_example = copy.deepcopy(fakes_for_fm.alarm_example)
    new_alarm_example['perceivedSeverity'] = 'CRITICAL'
    new_alarm_example['eventType'] = 'COMMUNICATIONS_ALARM'
    alarm = objects.AlarmV1.from_dict(new_alarm_example)
    # subsc-1: no filter at all -> always matches
    # (variable name keeps the original "fileter" spelling)
    subscs_no_fileter = objects.FmSubscriptionV1(id='subsc-1')
    # subsc-2: vnfProvider filter matching the instance
    products_vnfproducts_no_exist = objects._VnfProductsFromProviders(
        vnfProvider='company')
    inst_filter_match_products = objects.VnfInstanceSubscriptionFilter(
        vnfProductsFromProviders=[products_vnfproducts_no_exist])
    subscs_filter_match = objects.FmSubscriptionV1(
        id='subsc-2',
        filter=objects.FmNotificationsFilterV1(
            vnfInstanceSubscriptionFilter=inst_filter_match_products))
    # subsc-3: vnfProvider filter NOT matching the instance
    products_mismatch = objects._VnfProductsFromProviders(
        vnfProvider='test')
    inst_filter_mismatch_products = objects.VnfInstanceSubscriptionFilter(
        vnfProductsFromProviders=[products_mismatch])
    subscs_filter_mismatch = objects.FmSubscriptionV1(
        id='subsc-3',
        filter=objects.FmNotificationsFilterV1(
            vnfInstanceSubscriptionFilter=inst_filter_mismatch_products))
    # subsc-4/5: notificationTypes match / mismatch
    subscs_noti_type_match = objects.FmSubscriptionV1(
        id='subsc-4', filter=objects.FmNotificationsFilterV1(
            notificationTypes=['AlarmClearedNotification']))
    subscs_noti_type_mismatch = objects.FmSubscriptionV1(
        id='subsc-5', filter=objects.FmNotificationsFilterV1(
            notificationTypes=['AlarmNotification']))
    # subsc-6/7: faultyResourceTypes match / mismatch
    subscs_faulty_res_type_match = objects.FmSubscriptionV1(
        id='subsc-6', filter=objects.FmNotificationsFilterV1(
            faultyResourceTypes=['COMPUTE']))
    subscs_faulty_res_type_mismatch = objects.FmSubscriptionV1(
        id='subsc-7', filter=objects.FmNotificationsFilterV1(
            faultyResourceTypes=['STORAGE']))
    # subsc-8/9: perceivedSeverities match / mismatch
    subscs_per_sev_match = objects.FmSubscriptionV1(
        id='subsc-8', filter=objects.FmNotificationsFilterV1(
            perceivedSeverities=['CRITICAL']))
    subscs_per_sev_mismatch = objects.FmSubscriptionV1(
        id='subsc-9', filter=objects.FmNotificationsFilterV1(
            perceivedSeverities=['MAJOR']))
    # subsc-10/11: eventTypes match / mismatch
    subscs_event_type_match = objects.FmSubscriptionV1(
        id='subsc-10', filter=objects.FmNotificationsFilterV1(
            eventTypes=['COMMUNICATIONS_ALARM']))
    subscs_event_type_mismatch = objects.FmSubscriptionV1(
        id='subsc-11', filter=objects.FmNotificationsFilterV1(
            eventTypes=['PROCESSING_ERROR_ALARM']))
    # subsc-12/13: probableCauses match / mismatch
    subscs_probable_cause_match = objects.FmSubscriptionV1(
        id='subsc-12', filter=objects.FmNotificationsFilterV1(
            probableCauses=['The server cannot be connected.']))
    subscs_probable_cause_mismatch = objects.FmSubscriptionV1(
        id='subsc-13', filter=objects.FmNotificationsFilterV1(
            probableCauses=['The server is invalid.']))
    mock_subscs.return_value = [
        subscs_no_fileter, subscs_filter_match, subscs_filter_mismatch,
        subscs_noti_type_match, subscs_noti_type_mismatch,
        subscs_faulty_res_type_match, subscs_faulty_res_type_mismatch,
        subscs_per_sev_match, subscs_per_sev_mismatch,
        subscs_event_type_match, subscs_event_type_mismatch,
        subscs_probable_cause_match, subscs_probable_cause_mismatch]
    result = subsc_utils.get_matched_subscs(
        context, inst, notif_type, alarm)
    # only the no-filter and "match" subscriptions survive
    expected_ids = ['subsc-1', 'subsc-2', 'subsc-4', 'subsc-6',
                    'subsc-8', 'subsc-10', 'subsc-12']
    result_ids = [sub.id for sub in result]
    self.assertEqual(expected_ids, result_ids)
@mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
def test_get_alarm_subscs(self, mock_subscs):
    """Both cleared and non-cleared alarms select the subscription."""
    instance = objects.VnfInstanceV2(
        id='dummy-vnfInstanceId-1', vnfdId='dummy-vnfdId-1',
        vnfProvider='dummy-vnfProvider-1',
        vnfProductName='dummy-vnfProductName-1-1',
        vnfSoftwareVersion='1.0', vnfdVersion='1.0',
        vnfInstanceName='dummy-vnfInstanceName-1')
    mock_subscs.return_value = [objects.FmSubscriptionV1.from_dict(
        fakes_for_fm.fm_subsc_example)]
    expected_id = fakes_for_fm.fm_subsc_example['id']
    # alarm with alarmClearedTime present
    cleared_alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
    matched = subsc_utils.get_alarm_subscs(context, cleared_alarm, instance)
    self.assertEqual(expected_id, matched[0].id)
    # same alarm without alarmClearedTime
    raw = copy.deepcopy(fakes_for_fm.alarm_example)
    del raw['alarmClearedTime']
    active_alarm = objects.AlarmV1.from_dict(raw)
    matched = subsc_utils.get_alarm_subscs(context, active_alarm, instance)
    self.assertEqual(expected_id, matched[0].id)

View File

@ -0,0 +1,318 @@
# Copyright (C) 2022 FUJITSU
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import uuidutils
import requests
from unittest import mock
from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored import objects
from tacker.tests import base
class TestPmJobUtils(base.BaseTestCase):
    """Unit tests for tacker.sol_refactored.common.pm_job_utils."""

    def setUp(self):
        super(TestPmJobUtils, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        # tests run against the v2.1.0 VNFM API
        self.context.api_version = api_version.APIVersion('2.1.0')

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_update_report(self, mock_pm):
        """update_report attaches a report to the stored PM job."""
        _PmJobCriteriaV2 = objects.VnfPmJobCriteriaV2(
            performanceMetric=['VCpuUsageMeanVnf.VNF'],
            performanceMetricGroup=['VirtualisedComputeResource'],
            collectionPeriod=10,
            reportingPeriod=11,
            reportingBoundary='2000-05-23',
        )
        # NOTE(review): this uses 'username' while other tests in this
        # class use 'userName' -- confirm which field name the
        # SubscriptionAuthentication_ParamsBasic object declares.
        _SubscriptionAuthentication = objects.SubscriptionAuthentication(
            authType=['BASIC'],
            paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                username='test_name',
                password='test_pwd'
            )
        )
        mock_pm.return_value = objects.PmJobV2(
            id='pm_job_1',
            objectType='VNF',
            objectInstanceIds=['id_1'],
            subObjectInstanceIds=['sub_id_1', 'sub_id_2'],
            criteria=_PmJobCriteriaV2,
            callbackUri='callbackuri',
            authentication=_SubscriptionAuthentication
        )
        report = objects.PerformanceReportV2(
            id=uuidutils.generate_uuid(),
            jobId='pm_job_1',
        )
        result = pm_job_utils.update_report(self.context, 'pm_job_1',
                                            report, '2008-01-03 08:04:34')
        # result.id is the job id, so update_report appears to return
        # the updated PmJob (not the report)
        self.assertEqual('pm_job_1', result.id)

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
    def test_get_pm_job_all(self, mock_pm):
        # returns every stored PM job
        mock_pm.return_value = [objects.PmJobV2(id='pm_job_1')]
        result = pm_job_utils.get_pm_job_all(context)
        self.assertEqual('pm_job_1', result[0].id)

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_get_pm_job(self, mock_pm):
        # lookup of a single job by id
        mock_pm.return_value = objects.PmJobV2(id='pm_job_1')
        result = pm_job_utils.get_pm_job(context, 'pm_job_1')
        self.assertEqual('pm_job_1', result.id)

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_get_pm_job_error(self, mock_pm):
        # unknown job id -> PMJobNotExist
        mock_pm.return_value = None
        self.assertRaises(
            sol_ex.PMJobNotExist,
            pm_job_utils.get_pm_job, context=context, pm_job_id='pm_job-1'
        )

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_filter')
    def test_get_pm_report(self, mock_pm):
        mock_pm.return_value = [objects.PerformanceReportV2(id='report_1',
                                                            jobId='pm_job_1')]
        # with a report id a single report is returned ...
        result = pm_job_utils.get_pm_report(context, 'pm_job_1', 'report_1')
        self.assertEqual('pm_job_1', result.jobId)
        # ... without one, the list of reports for the job
        result = pm_job_utils.get_pm_report(context, 'pm_job_1')
        self.assertEqual('pm_job_1', result[0].jobId)

    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_filter')
    def test_get_pm_report_error(self, mock_pm):
        # unknown report -> PMReportNotExist
        mock_pm.return_value = None
        self.assertRaises(
            sol_ex.PMReportNotExist,
            pm_job_utils.get_pm_report, context=context,
            pm_job_id='pm_job_1', report_id='report_1'
        )

    def test_pm_job_href(self):
        # href is "<endpoint>/vnfpm/v2/pm_jobs/<job id>"
        result = pm_job_utils.pm_job_href('pm_job_1', 'endpoint')
        self.assertEqual('endpoint/vnfpm/v2/pm_jobs/pm_job_1', result)

    def test_pm_job_links(self):
        # the generated _links.self must point at the job resource
        pm_job = objects.PmJobV2(id='pm_job_1', objectInstanceIds=["id_1"])
        result = pm_job_utils.make_pm_job_links(pm_job, 'endpoint')
        href = result.self.href
        self.assertEqual('endpoint/vnfpm/v2/pm_jobs/pm_job_1', href)

    def test_get_notification_auth_handle(self):
        """The auth handle type follows the job's authentication."""
        # no authentication -> NoAuthHandle
        pm_job = objects.PmJobV2(id='pm_job_1')
        result = pm_job_utils._get_notification_auth_handle(pm_job)
        res = type(result).__name__
        name = type(http_client.NoAuthHandle()).__name__
        self.assertEqual(name, res)
        # BASIC -> BasicAuthHandle
        pm_job_1_auth = objects.SubscriptionAuthentication(
            authType=["BASIC"],
            paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                userName='test',
                password='test',
            )
        )
        pm_job_1 = objects.PmJobV2(
            id='pm_job_1',
            authentication=pm_job_1_auth)
        result = pm_job_utils._get_notification_auth_handle(pm_job_1)
        res = type(result).__name__
        name = type(http_client.BasicAuthHandle('test', 'test')).__name__
        self.assertEqual(name, res)
        # OAUTH2_CLIENT_CREDENTIALS -> OAuth2AuthHandle
        pm_job_2 = objects.PmJobV2(
            id='pm_job_2',
            authentication=objects.SubscriptionAuthentication(
                authType=["OAUTH2_CLIENT_CREDENTIALS"],
                paramsOauth2ClientCredentials=(
                    objects.SubscriptionAuthentication_ParamsOauth2(
                        clientId='test',
                        clientPassword='test',
                        tokenEndpoint='http://127.0.0.1/token'
                    ))
            )
        )
        result = pm_job_utils._get_notification_auth_handle(pm_job_2)
        res = type(result).__name__
        name = type(http_client.OAuth2AuthHandle(
            None, 'tokenEndpoint', 'test', 'test')).__name__
        self.assertEqual(name, res)
        # unsupported auth type (TLS_CERT) -> no handle
        pm_job_3 = objects.PmJobV2(id='pm_job_3',
                                   authentication=(
                                       objects.SubscriptionAuthentication(
                                           authType=["TLS_CERT"],
                                       ))
                                   )
        result = pm_job_utils._get_notification_auth_handle(pm_job_3)
        self.assertEqual(None, result)

    @mock.patch.object(http_client.HttpClient, 'do_request')
    def test_test_notification(self, mock_do_request):
        # 204 from the callback URI -> success, no exception
        resp_no_auth = requests.Response()
        resp_no_auth.status_code = 204
        mock_do_request.return_value = (resp_no_auth, None)
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                )
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        pm_job_utils.test_notification(pm_job)

    @mock.patch.object(http_client.HttpClient, 'do_request')
    def test_test_notification_error_code(self, mock_do_request):
        # execute not 204
        resp_no_auth = requests.Response()
        resp_no_auth.status_code = 500
        mock_do_request.return_value = (resp_no_auth, None)
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                )
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        self.assertRaises(sol_ex.TestNotificationFailed,
                          pm_job_utils.test_notification, pm_job=pm_job)

    class mock_session():
        # Session double whose request() always fails with HTTP 400.
        # Intentionally no ``self``: the class object is handed out and
        # request() is called unbound.
        def request(url, method, raise_exc=False, **kwargs):
            resp = requests.Response()
            resp.status_code = 400
            resp.headers['Content-Type'] = 'application/zip'
            return resp

    @mock.patch.object(http_client.HttpClient, '_decode_body')
    @mock.patch.object(http_client.BasicAuthHandle, 'get_session')
    def test_test_notification_error(self, mock_session, mock_decode_body):
        # execute not 204
        mock_session.return_value = self.mock_session
        mock_decode_body.return_value = None
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        self.assertRaises(sol_ex.TestNotificationFailed,
                          pm_job_utils.test_notification, pm_job=pm_job)

    def test_make_pm_notif_data(self):
        # the notification carries the reported object instance id
        sub_instance_ids = ['1', '2', '3', '4']
        pm_job = objects.PmJobV2(id='pm_job_1',
                                 objectType='VNF'
                                 )
        result = pm_job_utils.make_pm_notif_data('instance_id',
                                                 sub_instance_ids,
                                                 'report_id',
                                                 pm_job,
                                                 '2008-01-03 08:04:34',
                                                 'endpoint')
        self.assertEqual('instance_id', result.objectInstanceId)

    @mock.patch.object(http_client.HttpClient, 'do_request')
    def test_send_notification(self, mock_resp):
        """send_notification succeeds with and without authentication."""
        pm_job = objects.PmJobV2(id='pm_job_1',
                                 objectType='VNF',
                                 callbackUri='http://127.0.0.1/callback'
                                 )
        sub_instance_ids = ['1', '2', '3', '4']
        notif_data = pm_job_utils.make_pm_notif_data('instance_id',
                                                     sub_instance_ids,
                                                     'report_id',
                                                     pm_job,
                                                     '2008-01-03 08:04:34',
                                                     'endpoint')
        resp_no_auth = requests.Response()
        resp_no_auth.status_code = 204
        mock_resp.return_value = (resp_no_auth, None)
        # execute no_auth
        pm_job_utils.send_notification(pm_job, notif_data)
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            objectType='VNF',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        sub_instance_ids = ['1', '2', '3', '4']
        notif_data = pm_job_utils.make_pm_notif_data('instance_id',
                                                     sub_instance_ids,
                                                     'report_id',
                                                     pm_job,
                                                     '2008-01-03 08:04:34',
                                                     'endpoint')
        resp_no_auth = requests.Response()
        resp_no_auth.status_code = 204
        mock_resp.return_value = (resp_no_auth, None)
        # execute basic_auth
        pm_job_utils.send_notification(pm_job, notif_data)

    @mock.patch.object(http_client.HttpClient, 'do_request')
    def test_send_notification_error(self, mock_resp):
        """A broken response must not propagate out of send_notification."""
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            objectType='VNF',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        sub_instance_ids = ['1', '2', '3', '4']
        notif_data = pm_job_utils.make_pm_notif_data('instance_id',
                                                     sub_instance_ids,
                                                     'report_id',
                                                     pm_job,
                                                     '2008-01-03 08:04:34',
                                                     'endpoint')
        resp_no_auth = requests.Response()
        # NOTE(review): a non-int status_code forces a failure path;
        # send_notification is expected to swallow it (no assertion here)
        resp_no_auth.status_code = Exception()
        mock_resp.return_value = (resp_no_auth, None)
        # execute basic_auth
        pm_job_utils.send_notification(pm_job, notif_data)

View File

@ -0,0 +1,774 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import freezegun
import paramiko
import sys
import webob
from tacker import context
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import monitoring_plugin_base as mon_base
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.common import prometheus_plugin
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored import objects
from tacker.tests.unit import base
from unittest import mock
# Base of a Prometheus Alertmanager webhook POST body; individual
# tests fill in the 'alerts' list below.
_body_base = {
    'receiver': 'receiver',
    'status': 'firing',
    'alerts': [
    ],
    'groupLabels': {},
    'commonLabels': {
        'alertname': 'NodeInstanceDown',
        'job': 'node'
    },
    'commonAnnotations': {
        'description': 'sample'
    },
    'externalURL': 'http://controller147:9093',
    'version': '4',
    'groupKey': '{}:{}',
    'truncatedAlerts': 0
}
# Alert whose labels fully match _pm_job.
_body_pm_alert1 = {
    'status': 'firing',
    'labels': {
        'receiver_type': 'tacker',
        'function_type': 'vnfpm',
        'job_id': '64e46b0e-887a-4691-8d2b-aa3d7b157e2c',
        'metric': 'VCpuUsageMeanVnf.'
                  '25b9b9d0-2461-4109-866e-a7767375415b',
        'object_instance_id': '25b9b9d0-2461-4109-866e-a7767375415b'
    },
    'annotations': {
        'value': 99,
    },
    'startsAt': '2022-06-21T23:47:36.453Z',
    'endsAt': '0001-01-01T00:00:00Z',
    'generatorURL': 'http://controller147:9090/graph?g0.expr='
                    'up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1',
    'fingerprint': '5ef77f1f8a3ecb8d'
}
# function_type mismatch
_body_pm_alert2 = copy.deepcopy(_body_pm_alert1)
_body_pm_alert2['labels']['function_type'] = 'vnffm'
# object_instance_id mismatch
_body_pm_alert3 = copy.deepcopy(_body_pm_alert1)
_body_pm_alert3['labels']['object_instance_id'] = 'obj_instance_mismatch'
# sub_object_instance_id mismatch
_body_pm_alert4 = copy.deepcopy(_body_pm_alert1)
_body_pm_alert4['labels']['sub_object_instance_id'] = 'sub_object_mismatch'
# VnfIntCp metric (matches _pm_job2)
_body_pm_alert5 = copy.deepcopy(_body_pm_alert1)
_body_pm_alert5['labels']['metric'] = 'ByteIncomingVnfIntCp'
# metric name unknown to the plugin
_body_pm_alert6 = copy.deepcopy(_body_pm_alert1)
_body_pm_alert6['labels']['metric'] = 'InvalidMetric'
# webhook body with matching + mismatching alerts
_body_pm1 = copy.copy(_body_base)
_body_pm1.update({
    'alerts': [
        _body_pm_alert1, _body_pm_alert2, _body_pm_alert3, _body_pm_alert4]
})
# webhook body with VnfIntCp and invalid metrics
_body_pm2 = copy.copy(_body_base)
_body_pm2.update({
    'alerts': [_body_pm_alert5, _body_pm_alert6]
})
# PM job fixture for the VirtualizedComputeResource metric group,
# including Prometheus access metadata (second target entry is
# deliberately incomplete to exercise the error path).
_pm_job = {
    'id': 'job_id',
    'objectType': 'Vnf',
    'objectInstanceIds': ['25b9b9d0-2461-4109-866e-a7767375415b'],
    'subObjectInstanceIds': [],
    'criteria': {
        # NOTE(review): 'Vcpu...' here vs 'VCpu...' in the alerts and
        # report -- confirm the case difference is intentional so that
        # matching falls back to performanceMetricGroup.
        'performanceMetric': [
            'VcpuUsageMeanVnf.25b9b9d0-2461-4109-866e-a7767375415b'
        ],
        'performanceMetricGroup': [
            'VirtualizedComputeResource',
            'InvalidGroupName'
        ],
        'collectionPeriod': 15,
        'reportingPeriod': 30,
        'reportingBoundary': '2022-06-23T04:56:00.910Z'
    },
    'callbackUri': '',
    'reports': [],
    'metadata': {
        'monitoring': {
            'monitorName': 'prometheus',
            'driverType': 'external',
            'targetsInfo': [
                {
                    'prometheusHost':
                        'prometheusHost',
                    'prometheusHostPort': '22',
                    'authInfo': {
                        'ssh_username': 'ssh_username',
                        'ssh_password': 'ssh_password'
                    },
                    'alertRuleConfigPath':
                        'alertRuleConfigPath',
                    'prometheusReloadApiEndpoint':
                        'prometheusReloadApiEndpoint'
                },
                {
                    # invalid access info
                    'prometheusHost':
                        'prometheusHost',
                }
            ]
        }
    }
}
# Variant of _pm_job targeting VnfIntCp metrics.
_pm_job2 = copy.deepcopy(_pm_job)
_pm_job2['objectType'] = 'VnfIntCp'
_pm_job2['criteria']['performanceMetric'] = ['ByteIncomingVnfIntCp']
_pm_job2['criteria']['performanceMetricGroup'] = [
    'VnfInternalCp', 'VnfExternalCp']
# Existing performance report; the relative-time comments refer to
# datetime_test (2022-06-22T01:23:45.678Z), the frozen "now".
_pm_report = {
    'id': 'report_id',
    'jobId': 'pm_job_id',
    'entries': [{
        # objectType, InstanceId, Metric match the test
        # condition.
        'objectType': 'Vnf',
        'objectInstanceId':
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'performanceMetric':
            'VCpuUsageMeanVnf.'
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'performanceValues': [{
            # current_time - 60sec
            'timeStamp': '2022-06-22T01:22:45.678Z',
            'value': 12.3
        }, {
            # current_time - 30sec
            'timeStamp': '2022-06-22T01:23:15.678Z',
            'value': 45.6
        }]
    }, {
        # objectType, InstanceId, Metric do
        # not match the test condition (extra subObjectInstanceId).
        'objectType': 'Vnf',
        'objectInstanceId':
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'subObjectInstanceId':
            'ebd40865-e3d9-4ac6-b7f0-0a8d2791d07f',
        'performanceMetric':
            'VCpuUsageMeanVnf.'
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'performanceValues': [{
            # current_time - 30sec
            'timeStamp': '2022-06-22T01:23:15.678Z',
            'value': 45.6
        }]
    }, {
        # objectType, InstanceId, Metric do
        # not match the test condition (different metric).
        'objectType': 'Vnf',
        'objectInstanceId':
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'performanceMetric':
            'VMemoryUsageMeanVnf.'
            '25b9b9d0-2461-4109-866e-a7767375415b',
        'performanceValues': [{
            # current_time - 5sec
            'timeStamp': '2022-06-22T01:23:40.678Z',
            'value': 78.9
        }]
    }, {
        # objectType, InstanceId, Metric do
        # not match the test condition (different instance id).
        'objectType': 'Vnf',
        'objectInstanceId':
            'test_id',
        'performanceMetric':
            'VCpuUsageMeanVnf.test_id',
        'performanceValues': [{
            # current_time + 5sec
            'timeStamp': '2022-06-22T01:23:50.678Z',
            'value': 0.1
        }]
    }]
}
# Report with no entries yet.
_pm_report2 = {
    'id': 'report_id',
    'jobId': 'pm_job_id',
    'entries': []
}
# Minimal, not-instantiated VNF instance.
_inst_base = {
    'id': '25b9b9d0-2461-4109-866e-a7767375415b',
    'vnfdId': 'vnfdId',
    'vnfProvider': 'vnfProvider',
    'vnfProductName': 'vnfProductName',
    'vnfSoftwareVersion': 'vnfSoftwareVersion',
    'vnfdVersion': 'vnfdVersion',
    'instantiationState': 'NOT_INSTANTIATED',
}
# Instantiated variant with one VNFC backed by a k8s Deployment.
_inst1 = copy.copy(_inst_base)
_inst1.update({
    'instantiatedVnfInfo': {
        'id': 'id',
        'vduId': 'vduId',
        'vnfcResourceInfo': [{
            'id': 'id2',
            'vduId': 'vduId2',
            'computeResource': {
                'vimLevelResourceType': 'Deployment',
                'resourceId': 'pod-pod1'
            },
            'metadata': {
                'hostname': 'node2',
            }
        }],
        'vnfcInfo': [{
            'id': 'vnfc_info1',
            'vduId': 'vdu_id',
            'vnfcResourceInfoId': 'id2',
            'vnfcState': 'STARTED'
        }]
    }
})
# Frozen "now" used with freezegun; sits between the report's
# performanceValues timestamps above.
datetime_test = datetime.datetime.fromisoformat(
    '2022-06-22T01:23:45.678Z'.replace('Z', '+00:00'))
def unload_uuidsentinel():
    """Evict the cached uuidsentinel module from sys.modules.

    The module conflicts with freezegun's patching, so tests drop it
    before entering a freeze_time() block. Safe to call when the
    module is not loaded.
    """
    sys.modules.pop("tacker.tests.uuidsentinel", None)
class _ParamikoTest():
def __init__(self):
pass
def connect(self, **kwargs):
pass
def remove(self, arg1):
pass
def put(self, a1, a2):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
class TestPrometheusPluginPm(base.TestCase):
    """Tests for PrometheusPluginPm (PM jobs <-> Prometheus rules)."""

    def setUp(self):
        super(TestPrometheusPluginPm, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        # reset the singleton so each test builds a fresh plugin
        prometheus_plugin.PrometheusPluginPm._instance = None

    def tearDown(self):
        super(TestPrometheusPluginPm, self).tearDown()
        # delete singleton object
        prometheus_plugin.PrometheusPluginPm._instance = None

    def test_constructor_error(self):
        # NOTE(review): configures 'auto_scaling' although this class
        # exercises the PM plugin; confirm whether
        # 'performance_management' was intended here.
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=False)
        mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # direct construction after get_instance() must raise
        self.assertRaises(
            SystemError,
            prometheus_plugin.PrometheusPluginPm)

    def test_constructor_stub(self):
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=False)
        # with the feature disabled a stub instance is returned ...
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        self.assertIsInstance(pp._instance, mon_base.MonitoringPluginStub)
        # ... and repeated calls keep returning the stub
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        self.assertIsInstance(pp._instance, mon_base.MonitoringPluginStub)

    @mock.patch.object(pm_job_utils, 'get_pm_report')
    @mock.patch.object(pm_job_utils, 'get_pm_job')
    def test_pm(self, mock_pm_job, mock_pm_report):
        """_alert() converts matching alerts into report entries."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        mock_pm_job.return_value = objects.PmJobV2.from_dict(_pm_job)
        mock_pm_report.return_value = objects.PerformanceReportV2.from_dict(
            _pm_report)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        unload_uuidsentinel()
        with freezegun.freeze_time(datetime_test):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) > 0)
            self.assertEqual(
                result[0]['objectInstanceId'],
                '25b9b9d0-2461-4109-866e-a7767375415b')

    @mock.patch.object(pm_job_utils, 'get_pm_report')
    @mock.patch.object(pm_job_utils, 'get_pm_job')
    def test_pm_metrics(self, mock_pm_job, mock_pm_report):
        """Alerts whose metric does not fit the job must be rejected."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        mock_pm_job.return_value = objects.PmJobV2.from_dict(_pm_job)
        mock_pm_report.return_value = objects.PerformanceReportV2.from_dict(
            _pm_report)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        unload_uuidsentinel()
        with freezegun.freeze_time(datetime_test):
            self.assertRaises(
                sol_ex.PrometheusPluginError,
                pp._alert, self.request, body=_body_pm2
            )

    @mock.patch.object(pm_job_utils, 'get_pm_report')
    @mock.patch.object(pm_job_utils, 'get_pm_job')
    def test_pm_report(self, mock_pm_job, mock_pm_report):
        """_alert() works with an empty report and with no report."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        mock_pm_job.return_value = objects.PmJobV2.from_dict(_pm_job)
        mock_pm_report.return_value = objects.PerformanceReportV2.from_dict(
            _pm_report2)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        unload_uuidsentinel()
        with freezegun.freeze_time(datetime_test):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) > 0)
            self.assertEqual(
                result[0]['objectInstanceId'],
                '25b9b9d0-2461-4109-866e-a7767375415b')
        # no stored report at all
        mock_pm_report.return_value = None
        unload_uuidsentinel()
        with freezegun.freeze_time(datetime_test):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) > 0)
            self.assertEqual(
                result[0]['objectInstanceId'],
                '25b9b9d0-2461-4109-866e-a7767375415b')

    @mock.patch.object(pm_job_utils, 'get_pm_report')
    @mock.patch.object(pm_job_utils, 'get_pm_job')
    def test_pm_datetime(self, mock_pm_job, mock_pm_report):
        """Expired jobs and too-recent reports produce no entries."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        mock_pm_job.return_value = objects.PmJobV2.from_dict(_pm_job)
        mock_pm_report.return_value = objects.PerformanceReportV2.from_dict(
            _pm_report)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        unload_uuidsentinel()
        # a time pm job is already expired.
        datetime_now = datetime.datetime.fromisoformat(
            '2023-06-23T04:56:00.910+00:00')
        with freezegun.freeze_time(datetime_now):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) == 0)
        # now < latest reporting time + reportingPeriod
        datetime_now = datetime.datetime.fromisoformat(
            '2022-06-22T01:23:25.678+00:00')
        with freezegun.freeze_time(datetime_now):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) == 0)

    @mock.patch.object(pm_job_utils, 'get_pm_report')
    @mock.patch.object(pm_job_utils, 'get_pm_job')
    def test_pm_set_callback(self, mock_pm_job, mock_pm_report):
        """_alert() still works after the notification callback is unset."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        mock_pm_job.return_value = objects.PmJobV2.from_dict(_pm_job)
        mock_pm_report.return_value = objects.PerformanceReportV2.from_dict(
            _pm_report)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        pp.set_callback(None)
        unload_uuidsentinel()
        with freezegun.freeze_time(datetime_test):
            result = pp._alert(self.request, body=_body_pm1)
            self.assertTrue(len(result) > 0)
            self.assertEqual(
                result[0]['objectInstanceId'],
                '25b9b9d0-2461-4109-866e-a7767375415b')

    def test_pm_error_access_info(self):
        """Jobs without usable monitoring metadata must be rejected."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # metadata missing entirely
        job = copy.deepcopy(_pm_job)
        del job['metadata']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.delete_job, context=self.context, pm_job=job
        )
        # monitoring section empty
        job2 = copy.deepcopy(_pm_job)
        job2['metadata'] = {'monitoring': {}}
        job2 = objects.PmJobV2.from_dict(job2)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.delete_job, context=self.context, pm_job=job2
        )

    @mock.patch.object(http_client.HttpClient, 'do_request')
    @mock.patch.object(paramiko.SFTPClient, 'from_transport')
    @mock.patch.object(paramiko, 'Transport')
    def test_delete_job(self, mock_paramiko, mock_sftp, mock_do_request):
        """delete_job removes the rule file; HTTP errors are tolerated."""
        mock_paramiko.return_value = _ParamikoTest()
        mock_sftp.return_value = _ParamikoTest()
        resp = webob.Response()
        resp.status_code = 202
        mock_do_request.return_value = resp, {}
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # normal (202 Accepted)
        job = objects.PmJobV2.from_dict(_pm_job)
        pp.delete_job(context=self.context, pm_job=job)
        # error (503): must not raise
        resp.status_code = 503
        pp.delete_job(context=self.context, pm_job=job)

    @mock.patch.object(http_client.HttpClient, 'do_request')
    @mock.patch.object(paramiko.SFTPClient, 'from_transport')
    @mock.patch.object(paramiko, 'Transport')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_create_job(
            self, mock_inst, mock_paramiko, mock_sftp, mock_do_request):
        """create_job generates non-empty Prometheus rule groups."""
        mock_paramiko.return_value = _ParamikoTest()
        mock_sftp.return_value = _ParamikoTest()
        resp = webob.Response()
        resp.status_code = 202
        mock_do_request.return_value = resp, {}
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # VirtualizedComputeResource
        job = objects.PmJobV2.from_dict(_pm_job)
        rule = pp.create_job(context=self.context, pm_job=job)
        self.assertTrue(len(rule['groups'][0]['rules']) > 0)
        # VnfInternalCp: no sub-object ids -> wildcard interface
        job = objects.PmJobV2.from_dict(_pm_job2)
        rule = pp.create_job(context=self.context, pm_job=job)
        self.assertTrue(len(rule['groups'][0]['rules']) > 0)
        self.assertTrue('interface="*"' in str(rule))

    @mock.patch.object(http_client.HttpClient, 'do_request')
    @mock.patch.object(paramiko.SFTPClient, 'from_transport')
    @mock.patch.object(paramiko, 'Transport')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_create_job_subobj(
            self, mock_inst, mock_paramiko, mock_sftp, mock_do_request):
        """subObjectInstanceIds must be carried into the generated rules."""
        mock_paramiko.return_value = _ParamikoTest()
        mock_sftp.return_value = _ParamikoTest()
        resp = webob.Response()
        resp.status_code = 202
        mock_do_request.return_value = resp, {}
        inst = objects.VnfInstanceV2.from_dict(_inst1)
        mock_inst.return_value = inst
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # VirtualizedComputeResource
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        rule = pp.create_job(context=self.context, pm_job=job)
        self.assertTrue(len(rule['groups'][0]['rules']) > 0)
        self.assertEqual(
            rule['groups'][0]['rules'][0]['labels']['sub_object_instance_id'],
            job['subObjectInstanceIds'][0])
        # VnfInternalCp: sub-object id becomes the interface label
        job = copy.deepcopy(_pm_job2)
        job['subObjectInstanceIds'] = ['test_if0']
        job = objects.PmJobV2.from_dict(job)
        rule = pp.create_job(context=self.context, pm_job=job)
        self.assertTrue(len(rule['groups'][0]['rules']) > 0)
        self.assertTrue('interface="test_if0"' in str(rule))

    @mock.patch.object(http_client.HttpClient, 'do_request')
    @mock.patch.object(paramiko.SFTPClient, 'from_transport')
    @mock.patch.object(paramiko, 'Transport')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_create_job_error(
            self, mock_inst, mock_paramiko, mock_sftp, mock_do_request):
        """Every malformed job/instance combination raises PluginError."""
        mock_paramiko.return_value = _ParamikoTest()
        mock_sftp.return_value = _ParamikoTest()
        resp = webob.Response()
        resp.status_code = 202
        mock_do_request.return_value = resp, {}
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        # invalid object type
        job = copy.deepcopy(_pm_job)
        job['objectType'] = 'invalid_type'
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # invalid performanceMetric or performanceMetricGroup.
        job = copy.deepcopy(_pm_job)
        job['criteria']['performanceMetric'] = []
        job['criteria']['performanceMetricGroup'] = []
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # Invalid performanceMetric or performanceMetricGroup.
        job = copy.deepcopy(_pm_job2)
        job['criteria']['performanceMetric'] = []
        job['criteria']['performanceMetricGroup'] = ['VnfExternalCp']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # no instantiatedVnfInfo
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst_base)
        job = objects.PmJobV2.from_dict(_pm_job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # no instantiatedVnfInfo with subObjectInstanceIds
        job = copy.deepcopy(_pm_job2)
        job['subObjectInstanceIds'] = ['test_if0']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # no valid computeResource
        ins = copy.deepcopy(_inst1)
        _ = ins['instantiatedVnfInfo']['vnfcResourceInfo'][0]
        _['computeResource'] = {}
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = objects.PmJobV2.from_dict(_pm_job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # no vnfcInfo
        ins = copy.deepcopy(_inst1)
        del ins['instantiatedVnfInfo']['vnfcInfo']
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # vnfcInfo mismatch
        ins = copy.deepcopy(_inst1)
        ins['instantiatedVnfInfo']['vnfcInfo'][0]['vnfcResourceInfoId'] = 'ng'
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # vnfcInfo mismatch
        ins = copy.deepcopy(_inst1)
        del ins['instantiatedVnfInfo']['vnfcInfo'][0]['vnfcResourceInfoId']
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # resourcename mismatch: VirtualizedComputeResource
        ins = copy.deepcopy(_inst1)
        _ = ins['instantiatedVnfInfo']['vnfcResourceInfo']
        del _[0]['computeResource']['resourceId']
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # resourcename mismatch: VirtualizedComputeResource
        ins = copy.deepcopy(_inst1)
        _ = ins['instantiatedVnfInfo']['vnfcResourceInfo'][0]
        _['computeResource']['resourceId'] = 'test-xxx1-756757f8f-xcwmt'
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job)
        job['subObjectInstanceIds'] = ['vnfc_info1']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
        # vimLevelResourceType mismatch for VnfIntCp
        ins = copy.deepcopy(_inst1)
        _ = ins['instantiatedVnfInfo']['vnfcResourceInfo'][0]
        _['computeResource']['vimLevelResourceType'] = 'ng'
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(ins)
        job = copy.deepcopy(_pm_job2)
        job['subObjectInstanceIds'] = ['test_if0']
        job = objects.PmJobV2.from_dict(job)
        self.assertRaises(
            sol_ex.PrometheusPluginError,
            pp.create_job, context=self.context, pm_job=job
        )
class TestPrometheusPluginFm(base.TestCase):
    """Unit tests for the PrometheusPluginFm singleton plugin."""

    def setUp(self):
        super(TestPrometheusPluginFm, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        # Reset the singleton so each test starts from a clean state.
        prometheus_plugin.PrometheusPluginFm._instance = None

    def tearDown(self):
        super(TestPrometheusPluginFm, self).tearDown()
        # delete singleton object
        prometheus_plugin.PrometheusPluginFm._instance = None

    def test_constructor_error(self):
        """Direct construction fails once the singleton already exists."""
        # NOTE: the FM plugin is controlled by the 'fault_management'
        # option.  The original test set 'auto_scaling' here (copy-paste
        # from the auto-scaling tests) and only passed because
        # 'fault_management' defaults to False.
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=False)
        mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginFm)
        self.assertRaises(
            SystemError,
            prometheus_plugin.PrometheusPluginFm)

    def test_constructor_stub(self):
        """While fault management is disabled a stub plugin is returned."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=False)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginFm)
        self.assertIsInstance(pp._instance, mon_base.MonitoringPluginStub)
        # A second lookup must hand back the same stubbed singleton.
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginFm)
        self.assertIsInstance(pp._instance, mon_base.MonitoringPluginStub)

    def test_pm_no_body(self):
        """_alert() with no request body raises a validation error.

        NOTE(review): this exercises the PM plugin rather than the FM
        plugin; it presumably lives here by accident — confirm before
        relying on setUp/tearDown resetting PrometheusPluginPm (they
        reset only PrometheusPluginFm).
        """
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp = mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginPm)
        self.assertRaises(
            sol_ex.PrometheusPluginValidationError,
            pp._alert, self.request)
class TestPrometheusPluginAutoScaling(base.TestCase):
    """Unit tests for the PrometheusPluginAutoScaling singleton plugin."""

    def setUp(self):
        super(TestPrometheusPluginAutoScaling, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        # Make sure no singleton survives from a previous test.
        prometheus_plugin.PrometheusPluginAutoScaling._instance = None

    def tearDown(self):
        super(TestPrometheusPluginAutoScaling, self).tearDown()
        # delete singleton object
        prometheus_plugin.PrometheusPluginAutoScaling._instance = None

    def test_constructor_error(self):
        """Constructing the plugin directly after get_instance() fails."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=False)
        mon_base.MonitoringPlugin.get_instance(
            prometheus_plugin.PrometheusPluginAutoScaling)
        with self.assertRaises(SystemError):
            prometheus_plugin.PrometheusPluginAutoScaling()

    def test_constructor_stub(self):
        """With auto scaling disabled, every lookup yields the stub."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=False)
        # Both the creating call and the cached call must expose a stub.
        for _ in range(2):
            plugin_obj = mon_base.MonitoringPlugin.get_instance(
                prometheus_plugin.PrometheusPluginAutoScaling)
            self.assertIsInstance(
                plugin_obj._instance, mon_base.MonitoringPluginStub)

    def test_monitoring_plugin(self):
        """The stub's no-op API is callable without raising."""
        stub = mon_base.MonitoringPlugin.get_instance(
            mon_base.MonitoringPluginStub)
        stub.set_callback(None)
        stub.create_job()
        stub.delete_job()
        stub.alert()

    def test_monitoring_plugin_stub(self):
        """Repeated lookups reuse the stub; direct construction fails."""
        mon_base.MonitoringPlugin.get_instance(
            mon_base.MonitoringPluginStub)
        stub = mon_base.MonitoringPlugin.get_instance(
            mon_base.MonitoringPluginStub)
        stub.set_callback(None)
        stub.create_job()
        stub.delete_job()
        stub.alert()
        with self.assertRaises(SystemError):
            mon_base.MonitoringPluginStub()

View File

@ -147,15 +147,23 @@ class TestSubscriptionUtils(base.BaseTestCase):
self.assertRaises(sol_ex.TestNotificationFailed,
subsc_utils.test_notification, subsc_no_auth)
@mock.patch.object(http_client.HttpClient, 'do_request')
def test_test_notification_error(self, mock_resp):
class mock_session():
def request(url, method, raise_exc=False, **kwargs):
resp = requests.Response()
resp.status_code = 400
resp.headers['Content-Type'] = 'application/zip'
return resp
@mock.patch.object(http_client.HttpClient, '_decode_body')
@mock.patch.object(http_client.NoAuthHandle, 'get_session')
def test_test_notification_error(self, mock_session, mock_decode_body):
subsc_no_auth = objects.LccnSubscriptionV2(
id='sub-1', verbosity='SHORT',
callbackUri='http://127.0.0.1/callback')
resp_no_auth = Exception()
mock_resp.return_value = (resp_no_auth, None)
mock_session.return_value = self.mock_session
mock_decode_body.return_value = None
# execute no_auth
self.assertRaises(sol_ex.TestNotificationFailed,
subsc_utils.test_notification, subsc_no_auth)

View File

@ -0,0 +1,162 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from tacker import context
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.conductor import conductor_v2
from tacker.sol_refactored.conductor import prometheus_plugin_driver as pp_drv
from tacker.sol_refactored import objects
from tacker.tests.unit.db import base as db_base
from unittest import mock
# Minimal InstantiateVnfRequest: no additionalParams at all.
_req1 = {
    'flavourId': 'flavour'
}
# InstantiateVnfRequest with empty additionalParams (no monitoring section).
_req2 = {
    'flavourId': 'flavour',
    'additionalParams': {},
}
# InstantiateVnfRequest carrying an external Prometheus monitoring
# configuration under additionalParams.
_req3 = {
    'flavourId': 'flavour',
    'additionalParams': {
        'monitoring': {
            'monitorName': 'prometheus',
            'driverType': 'external',
            'targetsInfo': [
                {
                    'prometheusHost':
                        'prometheusHost',
                    'prometheusHostPort': '22',
                    'authInfo': {
                        'ssh_username': 'ssh_username',
                        'ssh_password': 'ssh_password'
                    },
                    'alertRuleConfigPath':
                        'alertRuleConfigPath',
                    'prometheusReloadApiEndpoint':
                        'prometheusReloadApiEndpoint'
                }
            ]
        }
    },
}
# VnfInstance without any 'metadata' key.
_inst1 = {
    'id': '25b9b9d0-2461-4109-866e-a7767375415b',
    'vnfdId': 'vnfdId',
    'vnfProvider': 'vnfProvider',
    'vnfProductName': 'vnfProductName',
    'vnfSoftwareVersion': 'vnfSoftwareVersion',
    'vnfdVersion': 'vnfdVersion',
    'instantiationState': 'NOT_INSTANTIATED',
}
# VnfInstance with an empty 'metadata' dict.
_inst2 = {
    'id': '25b9b9d0-2461-4109-866e-a7767375415b',
    'vnfdId': 'vnfdId',
    'vnfProvider': 'vnfProvider',
    'vnfProductName': 'vnfProductName',
    'vnfSoftwareVersion': 'vnfSoftwareVersion',
    'vnfdVersion': 'vnfdVersion',
    'instantiationState': 'NOT_INSTANTIATED',
    'metadata': {}
}
# VnfInstance whose metadata already contains the external Prometheus
# monitoring configuration (mirrors _req3's additionalParams).
_inst3 = {
    'id': '25b9b9d0-2461-4109-866e-a7767375415b',
    'vnfdId': 'vnfdId',
    'vnfProvider': 'vnfProvider',
    'vnfProductName': 'vnfProductName',
    'vnfSoftwareVersion': 'vnfSoftwareVersion',
    'vnfdVersion': 'vnfdVersion',
    'instantiationState': 'NOT_INSTANTIATED',
    'metadata': {
        'monitoring': {
            'monitorName': 'prometheus',
            'driverType': 'external',
            'targetsInfo': [
                {
                    'prometheusHost':
                        'prometheusHost',
                    'prometheusHostPort': '22',
                    'authInfo': {
                        'ssh_username': 'ssh_username',
                        'ssh_password': 'ssh_password'
                    },
                    'alertRuleConfigPath':
                        'alertRuleConfigPath',
                    'prometheusReloadApiEndpoint':
                        'prometheusReloadApiEndpoint'
                }
            ]
        }
    }
}
class TestPrometheusPlugin(db_base.SqlTestCase):
    """Unit tests for the conductor-side PrometheusPluginDriver."""

    def setUp(self):
        super(TestPrometheusPlugin, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        self.conductor = conductor_v2.ConductorV2()

    @mock.patch.object(http_client.HttpClient, 'do_request')
    def test_requst_scale(self, mock_do_request):
        """request_scale() posts a scale request; a 202 reply is accepted.

        NOTE(review): the method name has a typo ('requst'); kept as-is
        because renaming would change which tests the runner discovers.
        """
        resp = webob.Response()
        resp.status_code = 202
        mock_do_request.return_value = resp, {}
        scale_req = {
            'type': 'SCALE_OUT',
            'aspect_id': 'vdu',
        }
        # Should complete without raising when the API answers 202.
        self.conductor.request_scale(
            self.context, 'vnf_instance_id', scale_req)

    def test_constructor(self):
        """Resetting the singleton with the feature enabled is harmless."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp_drv.PrometheusPluginDriver._instance = None

    def test_driver_stub(self):
        """instance() returns a stub while PM is disabled.

        Once the option is re-enabled and the singleton reset, a fresh
        driver can be created again.
        """
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=False)
        pp_drv.PrometheusPluginDriver._instance = None
        # First call creates the stub; the second must reuse it.
        pp_drv.PrometheusPluginDriver.instance()
        drv = pp_drv.PrometheusPluginDriver.instance()
        # The stub's request_scale() is a no-op, so None args are fine.
        drv.request_scale(None, None, None)
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp_drv.PrometheusPluginDriver._instance = None
        # With the feature enabled a real driver is created.
        pp_drv.PrometheusPluginDriver.instance()

    def test_driver_constructor(self):
        """Direct construction fails once the singleton exists."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        pp_drv.PrometheusPluginDriver._instance = None
        pp_drv.PrometheusPluginDriver.instance()
        self.assertRaises(
            SystemError,
            pp_drv.PrometheusPluginDriver)

View File

@ -0,0 +1,69 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from oslo_utils import uuidutils
from tacker import context
from tacker.sol_refactored.common import fm_subscription_utils
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.conductor import vnffm_driver_v1
from tacker.sol_refactored import objects
from tacker.tests import base
from tacker.tests.unit.sol_refactored.samples import fakes_for_fm
class TestVnffmDriverV1(base.BaseTestCase):
    """Unit tests for vnffm_driver_v1.VnfFmDriverV1."""

    def setUp(self):
        super(TestVnffmDriverV1, self).setUp()
        objects.register_all()
        self.driver = vnffm_driver_v1.VnfFmDriverV1()
        self.context = context.get_admin_context()

    # NOTE: stacked mock.patch decorators are applied bottom-up, so the
    # mock arguments below arrive in reverse order of the decorator list
    # (get_by_id -> mock_alarm, get_inst -> mock_inst, ...).
    @mock.patch.object(fm_subscription_utils, 'send_notification')
    @mock.patch.object(fm_subscription_utils, 'get_alarm_subscs')
    @mock.patch.object(objects.base.TackerPersistentObject, 'create')
    @mock.patch.object(objects.base.TackerPersistentObject, 'update')
    @mock.patch.object(inst_utils, 'get_inst')
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_store_alarm_info(self, mock_alarm, mock_inst, mock_update,
                              mock_create, mock_subscs, mock_send_notif):
        """store_alarm_info() updates an existing alarm or creates one.

        Phase 1: get_by_id returns an existing alarm -> update path.
        Phase 2: get_by_id returns None -> create path.
        A notification is sent to subscribers in both cases.
        """
        alarm = objects.AlarmV1.from_dict(fakes_for_fm.alarm_example)
        mock_alarm.return_value = objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)
        mock_inst.return_value = objects.VnfInstanceV2(
            # required fields
            id=fakes_for_fm.alarm_example['managedObjectId'],
            vnfdId=uuidutils.generate_uuid(),
            vnfProvider='provider',
            vnfProductName='product name',
            vnfSoftwareVersion='software version',
            vnfdVersion='vnfd version',
            instantiationState='INSTANTIATED'
        )
        mock_subscs.return_value = [objects.FmSubscriptionV1.from_dict(
            fakes_for_fm.fm_subsc_example)]
        # Phase 1: existing alarm -> update, no create, one notification.
        self.driver.store_alarm_info(self.context, alarm)
        self.assertEqual(0, mock_create.call_count)
        self.assertEqual(1, mock_update.call_count)
        self.assertEqual(1, mock_send_notif.call_count)
        # Phase 2: unknown alarm -> create; counts are cumulative, so
        # update stays at 1 and notifications reach 2.
        mock_alarm.return_value = None
        self.driver.store_alarm_info(self.context, alarm)
        self.assertEqual(1, mock_create.call_count)
        self.assertEqual(1, mock_update.call_count)
        self.assertEqual(2, mock_send_notif.call_count)

View File

@ -0,0 +1,113 @@
# Copyright (C) 2022 FUJITSU
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from tacker.tests import base
from tacker import context
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.conductor.vnfpm_driver_v2 import VnfPmDriverV2
from tacker.sol_refactored.nfvo.nfvo_client import NfvoClient
from tacker.sol_refactored import objects
class TestVnfPmDriverV2(base.BaseTestCase):
    """Unit tests for the conductor-side VnfPmDriverV2."""

    def setUp(self):
        super(TestVnfPmDriverV2, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()

    @mock.patch.object(NfvoClient, 'send_pm_job_notification')
    @mock.patch.object(pm_job_utils, 'update_report')
    @mock.patch.object(objects.base.TackerPersistentObject, 'update')
    @mock.patch.object(objects.base.TackerPersistentObject, 'create')
    def test_store_job_info(self, mock_create, mock_update, mock_update_report,
                            mock_send):
        """store_job_info() persists the report and notifies the NFVO."""
        mock_create.return_value = None
        mock_update.return_value = None
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            # NOTE: fixed from the original 'objectTtype' typo, which
            # silently set a stray attribute instead of the real field.
            objectType='VNF',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        report = {
            "id": "fake_id",
            "jobId": "fake_job_id",
            "entries": [{
                "objectType": "VNF",
                "objectInstanceId": "instance_id_1",
                "subObjectInstanceId": "subObjectInstanceId_1",
                'performanceValues': [{
                    'timeStamp': "2022-06-21T23:47:36.453Z",
                    'value': "99.0"
                }]
            }]
        }
        mock_update_report.return_value = pm_job
        mock_send.return_value = None
        # Should run the whole store-and-notify flow without raising.
        VnfPmDriverV2().store_job_info(context=self.context,
                                       report=report)

    @mock.patch.object(objects.base.TackerPersistentObject, 'create')
    def test_store_report(self, mock_create):
        """_store_report() builds a report object from the raw dict."""
        mock_create.return_value = None
        report = {
            "id": "fake_id",
            "jobId": "fake_job_id",
            "entries": [{
                "objectType": "VNF",
                "objectInstanceId": "instance_id_1",
                "subObjectInstanceId": "subObjectInstanceId_1",
                'performanceValues': [{
                    'timeStamp': "2022-06-21T23:47:36.453Z",
                    'value': "99.0"
                }]
            }]
        }
        result = VnfPmDriverV2()._store_report(context=self.context,
                                               report=report)
        self.assertEqual('fake_job_id', result.jobId)

    @mock.patch.object(objects.base.TackerPersistentObject, 'update')
    @mock.patch.object(pm_job_utils, 'update_report')
    def test_update_job_reports(self, mock_update_report, mock_update):
        """_update_job_reports() attaches a new report to the job."""
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            # NOTE: fixed from the original 'objectTtype' typo.
            objectType='VNF',
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            ),
            callbackUri='http://127.0.0.1/callback'
        )
        mock_update_report.return_value = pm_job
        mock_update.return_value = None
        result = VnfPmDriverV2()._update_job_reports(
            context=self.context, job_id='pm_job_1', report='report',
            timestamp='timestamp')
        self.assertEqual('pm_job_1', result.id)

View File

@ -0,0 +1,380 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
from unittest import mock
from tacker import context
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils
from tacker.sol_refactored.common import prometheus_plugin as plugin
from tacker.sol_refactored.common import vnf_instance_utils as inst_utils
from tacker.sol_refactored.controller import prometheus_plugin_controller
from tacker.sol_refactored import objects
from tacker.tests.unit import base
# Skeleton of a Prometheus Alertmanager webhook POST body; the per-test
# bodies below fill in 'alerts'.
_body_base = {
    'receiver': 'receiver',
    'status': 'firing',
    'alerts': [
    ],
    'groupLabels': {},
    'commonLabels': {
        'alertname': 'NodeInstanceDown',
        'job': 'node'
    },
    'commonAnnotations': {
        'description': 'sample'
    },
    'externalURL': 'http://controller147:9093',
    'version': '4',
    'groupKey': '{}:{}',
    'truncatedAlerts': 0
}
# Well-formed firing FM alert routed to tacker's vnffm function.
_body_fm_alert1 = {
    'status': 'firing',
    'labels': {
        'receiver_type': 'tacker',
        'function_type': 'vnffm',
        'vnf_instance_id': 'vnf_instance_id',
        'pod': r'test\-test1\-[0-9a-f]{1,10}-[0-9a-z]{5}$',
        'perceived_severity': 'CRITICAL',
        'event_type': 'PROCESSING_ERROR_ALARM'
    },
    'annotations': {
        'probable_cause': '',
        'fault_type': '',
        'fault_details': ''
    },
    'startsAt': '2022-06-21T23:47:36.453Z',
    'endsAt': '0001-01-01T00:00:00Z',
    'generatorURL': 'http://controller147:9090/graph?g0.expr='
                    'up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1',
    'fingerprint': '5ef77f1f8a3ecb8d'
}
# function_type mismatch
_body_fm_alert2 = copy.deepcopy(_body_fm_alert1)
_body_fm_alert2['labels']['function_type'] = 'vnfpm'
# status resolved
_body_fm_alert3 = copy.deepcopy(_body_fm_alert1)
_body_fm_alert3['status'] = 'resolved'
# pod mismatch
_body_fm_alert4 = copy.deepcopy(_body_fm_alert1)
_body_fm_alert4['labels']['pod'] = 'mismatch_node'
# pod does not exist
_body_fm_alert5 = copy.deepcopy(_body_fm_alert1)
del _body_fm_alert5['labels']['pod']
# NOTE(review): the _body_fm* bodies use copy.copy, so they share the
# nested dicts of _body_base — presumably acceptable because the tests
# never mutate those; confirm before adding mutating tests.
_body_fm1 = copy.copy(_body_base)
_body_fm1.update({
    'alerts': [_body_fm_alert1, _body_fm_alert2]
})
_body_fm2 = copy.copy(_body_base)
_body_fm2.update({
    'alerts': [_body_fm_alert3]
})
_body_fm3 = copy.copy(_body_base)
_body_fm3.update({
    'alerts': [_body_fm_alert4]
})
_body_fm4 = copy.copy(_body_base)
_body_fm4.update({
    'alerts': [_body_fm_alert5]
})
# An alarm that is still open (UNACKNOWLEDGED) for the "already firing"
# and "resolved" test paths; fingerprint matches _body_fm_alert1.
_not_cleared_alarms = {
    'id': 'id',
    'managedObjectId': 'managedObjectId',
    'rootCauseFaultyResource': {
        'faultyResource': {
            'resourceId': 'resourceId',
            'vimConnectionId': 'vimConnectionId',
            'vimLevelResourceType': 'vimLevelResourceType'
        },
        'faultyResourceType': 'COMPUTE'
    },
    'faultDetails': [
        'fingerprint: 5ef77f1f8a3ecb8d'
    ],
    'alarmRaisedTime': '2022-06-23T04:56:00.910Z',
    'ackState': 'UNACKNOWLEDGED',
    'perceivedSeverity': 'WARNING',
    'eventTime': '2022-06-23T04:56:00.910Z',
    'eventType': 'PROCESSING_ERROR_ALARM',
    'probableCause': 'problemCause',
    'isRootCause': False
}
# Auto-scaling alert routed to tacker's auto_scale function.
_body_scale_alert1 = {
    'status': 'firing',
    'labels': {
        'receiver_type': 'tacker',
        'function_type': 'auto_scale',
        'vnf_instance_id': 'vnf instance id',
        'auto_scale_type': 'SCALE_OUT',
        'aspect_id': 'aspect'
    },
    'annotations': {
    },
    'startsAt': '2022-06-21T23:47:36.453Z',
    'endsAt': '0001-01-01T00:00:00Z',
    'generatorURL': 'http://controller147:9090/graph?g0.expr='
                    'up%7Bjob%3D%22node%22%7D+%3D%3D+0&g0.tab=1',
    'fingerprint': '5ef77f1f8a3ecb8d'
}
# function_type mismatch
_body_scale_alert2 = copy.deepcopy(_body_scale_alert1)
_body_scale_alert2['labels']['function_type'] = 'vnffm'
_body_scale = copy.deepcopy(_body_base)
_body_scale.update({
    'alerts': [_body_scale_alert1, _body_scale_alert2]
})
# VnfInstance with two vnfcResourceInfo entries; only 'id2' carries the
# Deployment compute resource the alerts refer to.
_inst1 = {
    'id': 'test_id',
    'vnfdId': 'vnfdId',
    'vnfProvider': 'vnfProvider',
    'vnfProductName': 'vnfProductName',
    'vnfSoftwareVersion': 'vnfSoftwareVersion',
    'vnfdVersion': 'vnfdVersion',
    'instantiationState': 'NOT_INSTANTIATED',
    'instantiatedVnfInfo': {
        'id': 'id',
        'vduId': 'vduId',
        'vnfcResourceInfo': [
            {
                'id': 'id',
                'vduId': 'vduId',
                'computeResource': {},
                'metadata': {
                    'hostname': 'node1',
                }
            }, {
                'id': 'id2',
                'vduId': 'vduId2',
                'computeResource': {
                    'vimLevelResourceType': 'Deployment',
                    'resourceId': 'test-test1-756757f8f-xcwmt'
                }
            }
        ],
        'vnfcInfo': [{
            'id': 'vnfc_info1',
            'vduId': 'vdu_id',
            'vnfcResourceInfoId': 'id2',
            'vnfcState': 'STARTED'
        }]
    },
    'metadata': {
    }
}
# Same instance with auto-scale enabled via vnfConfigurableProperties.
_inst2 = copy.deepcopy(_inst1)
_inst2.update({
    'vnfConfigurableProperties': {
        'isAutoscaleEnabled': True
    },
})
# fromisoformat() cannot parse a trailing 'Z', hence the replace().
datetime_test = datetime.datetime.fromisoformat(
    '2022-06-22T01:23:45.678Z'.replace('Z', '+00:00'))
class TestPrometheusPluginPm(base.TestCase):
    """Tests for PmEventController handling of Prometheus PM events."""

    def setUp(self):
        super(TestPrometheusPluginPm, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        self.controller = prometheus_plugin_controller.PmEventController()
        # Start every test with a fresh plugin singleton.
        plugin.PrometheusPluginPm._instance = None

    def tearDown(self):
        super(TestPrometheusPluginPm, self).tearDown()
        # Drop the singleton so later tests are unaffected.
        plugin.PrometheusPluginPm._instance = None

    def test_pm_event_config_false(self):
        """pm_event() is rejected while performance management is off."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=False)
        with self.assertRaises(sol_ex.PrometheusPluginNotEnabled):
            self.controller.pm_event(self.request, {})

    def test_pm_exception(self):
        """An empty event body is tolerated and answered with 204."""
        self.config_fixture.config(
            group='prometheus_plugin', performance_management=True)
        resp = self.controller.pm_event(self.request, {})
        self.assertEqual(204, resp.status)
class TestPrometheusPluginFm(base.TestCase):
    """Tests for FmAlertController handling of Prometheus FM alerts."""

    def setUp(self):
        super(TestPrometheusPluginFm, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        self.controller = prometheus_plugin_controller.FmAlertController()
        plugin.PrometheusPluginFm._instance = None

    def tearDown(self):
        super(TestPrometheusPluginFm, self).tearDown()
        # delete singleton object
        plugin.PrometheusPluginFm._instance = None

    def test_fm_config_false(self):
        """alert() raises while fault management is disabled."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=False)
        self.assertRaises(
            sol_ex.PrometheusPluginNotEnabled,
            self.controller.alert, self.request, {})

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_fm_firing(self, mock_inst, mock_alarms):
        """A firing alert with no open alarms is accepted with 204."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.return_value = []
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        result = self.controller.alert(self.request, _body_fm1)
        self.assertEqual(204, result.status)

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    def test_fm_firing_exception(self, mock_alarms):
        """Internal errors are swallowed; the webhook still gets 204."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.side_effect = Exception("test exception")
        result = self.controller.alert(self.request, _body_fm1)
        self.assertEqual(204, result.status)

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_fm_firing_mismatch(self, mock_inst, mock_alarms):
        """Alerts with a mismatching or missing pod label return 204."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.return_value = []
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        # pod label does not match any vnfcResourceInfo
        result = self.controller.alert(self.request, _body_fm3)
        self.assertEqual(204, result.status)
        # pod label is absent from the alert
        result = self.controller.alert(self.request, _body_fm4)
        self.assertEqual(204, result.status)

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_fm_already_firing(self, mock_inst, mock_alarms):
        """A firing alert matching an open alarm is accepted with 204."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.return_value = [
            objects.AlarmV1.from_dict(_not_cleared_alarms)]
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        result = self.controller.alert(self.request, _body_fm1)
        self.assertEqual(204, result.status)

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_fm_resolved(self, mock_inst, mock_alarms):
        """A resolved alert against an open alarm is accepted with 204."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.return_value = [
            objects.AlarmV1.from_dict(_not_cleared_alarms)]
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        result = self.controller.alert(self.request, _body_fm2)
        self.assertEqual(204, result.status)

    @mock.patch.object(fm_alarm_utils, 'get_not_cleared_alarms')
    @mock.patch.object(inst_utils, 'get_inst')
    def test_fm_set_callback(self, mock_inst, mock_alarms):
        """Alerts are still handled after the callback is cleared."""
        self.config_fixture.config(
            group='prometheus_plugin', fault_management=True)
        mock_alarms.return_value = [
            objects.AlarmV1.from_dict(_not_cleared_alarms)]
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        _plugin = plugin.PrometheusPluginFm.instance()
        _plugin.set_callback(None)
        result = self.controller.alert(self.request, _body_fm2)
        self.assertEqual(204, result.status)
        mock_alarms.return_value = []
        result = self.controller.alert(self.request, _body_fm1)
        self.assertEqual(204, result.status)
class TestPrometheusPluginAutoScaling(base.TestCase):
    """Tests for AutoScalingController handling of scaling alerts."""

    def setUp(self):
        super(TestPrometheusPluginAutoScaling, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        self.controller = prometheus_plugin_controller.AutoScalingController()
        plugin.PrometheusPluginAutoScaling._instance = None

    def tearDown(self):
        super(TestPrometheusPluginAutoScaling, self).tearDown()
        # delete singleton object
        plugin.PrometheusPluginAutoScaling._instance = None

    def test_auto_scaling_config_false(self):
        """auto_scaling_id() raises while auto scaling is disabled."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=False)
        self.assertRaises(
            sol_ex.PrometheusPluginNotEnabled,
            self.controller.auto_scaling_id, self.request, 'id', {})

    @mock.patch.object(inst_utils, 'get_inst')
    def test_auto_scaling_no_autoscale_enabled(self, mock_inst):
        """Alerts for an instance without isAutoscaleEnabled get 204."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=True)
        # _inst1 has no vnfConfigurableProperties.
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst1)
        result = self.controller.auto_scaling_id(
            self.request, 'id', _body_scale)
        self.assertEqual(204, result.status)

    @mock.patch.object(inst_utils, 'get_inst')
    def test_auto_scaling_is_autoscale_enabled(self, mock_inst):
        """Alerts for an autoscale-enabled instance get 204."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=True)
        # _inst2 sets isAutoscaleEnabled: True.
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst2)
        result = self.controller.auto_scaling(self.request, _body_scale)
        self.assertEqual(204, result.status)

    @mock.patch.object(inst_utils, 'get_inst')
    def test_auto_scaling_set_callback(self, mock_inst):
        """Alerts are still handled after the callback is cleared."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=True)
        _plugin = plugin.PrometheusPluginAutoScaling.instance()
        _plugin.set_callback(None)
        mock_inst.return_value = objects.VnfInstanceV2.from_dict(_inst2)
        result = self.controller.auto_scaling(self.request, _body_scale)
        self.assertEqual(204, result.status)

    def test_auto_scaling_error_body(self):
        """A malformed (empty) body is tolerated and answered with 204."""
        self.config_fixture.config(
            group='prometheus_plugin', auto_scaling=True)
        result = self.controller.auto_scaling(self.request, {})
        self.assertEqual(204, result.status)

View File

@ -0,0 +1,205 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from unittest import mock
from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import fm_alarm_utils as alarm_utils
from tacker.sol_refactored.common import fm_subscription_utils as subsc_utils
from tacker.sol_refactored.controller import vnffm_v1
from tacker.sol_refactored import objects
from tacker.tests import base
from tacker.tests.unit.sol_refactored.samples import fakes_for_fm
# Fixture identifiers (arbitrary UUIDs) used across the tests below.
SAMPLE_INST_ID = 'c61314d0-f583-4ab3-a457-46426bce02d3'
SAMPLE_ALARM_ID = '78a39661-60a8-4824-b989-88c1b0c3534a'
# NOTE(review): same UUID as SAMPLE_ALARM_ID — looks harmless for these
# tests, but confirm it is intentional.
SAMPLE_SUBSC_ID = '78a39661-60a8-4824-b989-88c1b0c3534a'
class TestVnffmV1(base.BaseTestCase):
    """Unit tests for the SOL002/003 VNF FM v1 controller."""

    def setUp(self):
        super(TestVnffmV1, self).setUp()
        objects.register_all()
        self.controller = vnffm_v1.VnfFmControllerV1()
        self.context = context.get_admin_context()
        self.context.api_version = api_version.APIVersion("1.3.0")
        self.request = mock.Mock()
        self.request.context = self.context

    def test_supported_api_versions(self):
        """The FM API advertises only version 1.3.0."""
        result = self.controller.supported_api_versions('show')
        self.assertEqual(['1.3.0'], result)

    def test_allowed_content_types(self):
        """update additionally accepts merge-patch content."""
        result = self.controller.allowed_content_types('show')
        self.assertEqual(['application/json', 'text/plain'], result)
        result = self.controller.allowed_content_types('update')
        self.assertEqual(['application/mergepatch+json', 'application/json',
                          'text/plain'], result)

    @mock.patch.object(alarm_utils, 'get_alarms_all')
    def test_index(self, mock_alarms):
        """index() honours the attribute filter and lists all without it."""
        request = requests.Request()
        request.context = self.context
        request.GET = {'filter': f'(eq,managedObjectId,{SAMPLE_INST_ID})'}
        mock_alarms.return_value = [objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example), objects.AlarmV1(
            id='test-1', managedObjectId='inst-1')]
        # Filtered: only the alarm matching SAMPLE_INST_ID remains.
        result = self.controller.index(request)
        self.assertEqual(200, result.status)
        self.assertEqual([fakes_for_fm.alarm_example], result.body)
        self.assertEqual('1.3.0', result.headers['version'])
        # no filter
        request.GET = {}
        result = self.controller.index(request)
        self.assertEqual(200, result.status)
        self.assertEqual(2, len(result.body))
        self.assertEqual('1.3.0', result.headers['version'])

    @mock.patch.object(alarm_utils, 'get_alarm')
    def test_show(self, mock_alarm):
        """show() returns the alarm body with the version header set."""
        request = requests.Request()
        request.context = self.context
        mock_alarm.return_value = objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)
        result = self.controller.show(request, SAMPLE_ALARM_ID)
        self.assertEqual(200, result.status)
        self.assertEqual(fakes_for_fm.alarm_example, result.body)
        self.assertEqual('1.3.0', result.headers['version'])

    @mock.patch.object(objects.base.TackerPersistentObject, 'update')
    @mock.patch.object(alarm_utils, 'get_alarm')
    def test_update(self, mock_alarm, mock_update):
        """Acknowledging an alarm echoes the new ackState back."""
        mock_alarm.return_value = objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)
        body = {"ackState": "ACKNOWLEDGED"}
        result = self.controller.update(
            request=self.request, id=SAMPLE_ALARM_ID, body=body)
        self.assertEqual(200, result.status)
        self.assertEqual('1.3.0', result.headers['version'])
        self.assertEqual(body, result.body)

    @mock.patch.object(alarm_utils, 'get_alarm')
    def test_update_invalid_body(self, mock_alarm):
        """Re-applying the current UNACKNOWLEDGED state is rejected."""
        mock_alarm.return_value = objects.AlarmV1.from_dict(
            fakes_for_fm.alarm_example)
        body = {"ackState": "UNACKNOWLEDGED"}
        self.assertRaises(sol_ex.AckStateInvalid, self.controller.update,
                          request=self.request, id=SAMPLE_ALARM_ID, body=body)

    @mock.patch.object(objects.base.TackerPersistentObject, 'create')
    @mock.patch.object(subsc_utils, 'test_notification')
    def test_subscription_create(self, mock_test, mock_create):
        """A valid subscription is created; credentials are not echoed."""
        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["BASIC", "OAUTH2_CLIENT_CREDENTIALS"],
                "paramsBasic": {
                    "userName": "test",
                    "password": "test"
                },
                "paramsOauth2ClientCredentials": {
                    "clientId": "test",
                    "clientPassword": "test",
                    "tokenEndpoint": "https://127.0.0.1/token"
                }
            },
            "filter": fakes_for_fm.fm_subsc_example['filter']
        }
        result = self.controller.subscription_create(
            request=self.request, body=body)
        self.assertEqual(201, result.status)
        self.assertEqual(body['callbackUri'], result.body['callbackUri'])
        self.assertEqual(body['filter'], result.body['filter'])
        # authentication must never be returned to the client
        self.assertIsNone(result.body.get('authentication'))

    def test_invalid_subscripion(self):
        """Each authType requires its matching params; TLS_CERT is unsupported."""
        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["BASIC"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
            self.controller.subscription_create, request=self.request,
            body=body)
        self.assertEqual("ParamsBasic must be specified.", ex.detail)
        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["OAUTH2_CLIENT_CREDENTIALS"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
            self.controller.subscription_create, request=self.request,
            body=body)
        self.assertEqual("paramsOauth2ClientCredentials must be specified.",
                         ex.detail)
        body = {
            "callbackUri": "http://127.0.0.1:6789/notification",
            "authentication": {
                "authType": ["TLS_CERT"]
            }
        }
        ex = self.assertRaises(sol_ex.InvalidSubscription,
            self.controller.subscription_create, request=self.request,
            body=body)
        self.assertEqual("'TLS_CERT' is not supported at the moment.",
                         ex.detail)

    @mock.patch.object(subsc_utils, 'get_subsc_all')
    def test_subscription_list(self, mock_subsc):
        """subscription_list() works with and without a filter."""
        request = requests.Request()
        request.context = self.context
        request.GET = {
            'filter': '(eq,callbackUri,/nfvo/notify/alarm)'}
        mock_subsc.return_value = [
            objects.FmSubscriptionV1.from_dict(fakes_for_fm.fm_subsc_example)]
        result = self.controller.subscription_list(request)
        self.assertEqual(200, result.status)
        # no filter
        request.GET = {}
        result = self.controller.subscription_list(request)
        self.assertEqual(200, result.status)

    @mock.patch.object(subsc_utils, 'get_subsc')
    def test_subscription_show(self, mock_subsc):
        """subscription_show() returns 200 for an existing subscription."""
        mock_subsc.return_value = objects.FmSubscriptionV1.from_dict(
            fakes_for_fm.fm_subsc_example)
        result = self.controller.subscription_show(
            request=self.request, id=SAMPLE_SUBSC_ID)
        self.assertEqual(200, result.status)

    @mock.patch.object(subsc_utils, 'get_subsc')
    @mock.patch.object(objects.base.TackerPersistentObject, 'delete')
    def test_subscription_delete(self, mock_delete, mock_subsc):
        """subscription_delete() answers 204 for an existing subscription."""
        mock_subsc.return_value = objects.FmSubscriptionV1.from_dict(
            fakes_for_fm.fm_subsc_example)
        result = self.controller.subscription_delete(
            request=self.request, id=SAMPLE_SUBSC_ID)
        self.assertEqual(204, result.status)

View File

@ -0,0 +1,433 @@
# Copyright (C) 2022 FUJITSU
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import requests
from unittest import mock
from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import exceptions as sol_ex
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.controller.vnflcm_view import BaseViewBuilder
from tacker.sol_refactored.controller.vnflcm_view import Pager
from tacker.sol_refactored.controller import vnfpm_v2
from tacker.sol_refactored.controller import vnfpm_view
from tacker.sol_refactored.controller.vnfpm_view import PmJobViewBuilder
from tacker.sol_refactored import objects
from tacker.tests import base
CONF = config.CONF
class TestVnfpmV2(base.BaseTestCase):
    """Unit tests for the v2 VNF Performance Management (PM job) controller.

    The PM job API is Tacker API version 2.1.0, based on ETSI NFV-SOL 002 /
    SOL 003 v3.3.1.
    """
    def setUp(self):
        super(TestVnfpmV2, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.context.api_version = api_version.APIVersion("2.1.0")
        self.request = mock.Mock()
        self.request.context = self.context
        self.controller = vnfpm_v2.VnfPmControllerV2()
        self.endpoint = CONF.v2_vnfm.endpoint
        self._pm_job_view = vnfpm_view.PmJobViewBuilder(self.endpoint)
    def test_check_http_client_auth(self):
        """_check_http_client_auth: complete BASIC/OAUTH2 params are accepted;
        missing params or TLS_CERT raise InvalidSubscription.
        """
        # BASIC declared but paramsBasic is None -> rejected
        auth_req = {
            'authType': ['BASIC'],
            'paramsBasic': None
        }
        self.assertRaises(sol_ex.InvalidSubscription,
                          vnfpm_v2._check_http_client_auth,
                          auth_req=auth_req)
        auth_req_1 = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test',
                'password': 'test'
            },
        }
        result = vnfpm_v2._check_http_client_auth(auth_req_1)
        self.assertEqual(['BASIC'], result.authType)
        auth_req_2 = {
            'authType': ['OAUTH2_CLIENT_CREDENTIALS'],
            'paramsOauth2ClientCredentials': {
                'clientId': 'test',
                'clientPassword': 'test',
                'tokenEndpoint':
                    'http://127.0.0.1/token'
            }
        }
        result = vnfpm_v2._check_http_client_auth(auth_req_2)
        self.assertEqual(['OAUTH2_CLIENT_CREDENTIALS'], result.authType)
        # OAUTH2 declared but paramsOauth2ClientCredentials missing -> rejected
        auth_req_3 = {
            'authType': ['OAUTH2_CLIENT_CREDENTIALS'],
        }
        self.assertRaises(sol_ex.InvalidSubscription,
                          vnfpm_v2._check_http_client_auth,
                          auth_req=auth_req_3)
        # TLS_CERT is not supported -> rejected
        auth_req_4 = {
            'authType': ['TLS_CERT']
        }
        self.assertRaises(sol_ex.InvalidSubscription,
                          vnfpm_v2._check_http_client_auth,
                          auth_req=auth_req_4)
    def test_check_performance_metric_or_group(self):
        """_check_performance_metric_or_group validates the metric/group mix."""
        # one group with a matching metric is accepted
        vnfpm_v2._check_performance_metric_or_group(
            obj_type='Vnf',
            metric_group=['VirtualisedComputeResource'],
            performance_metric=['VCpuUsageMeanVnf.VNF'])
        # two metric groups -> rejected
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          vnfpm_v2._check_performance_metric_or_group,
                          obj_type='Vnf',
                          metric_group=['VirtualisedComputeResource',
                                        'VnfInternalCP'],
                          performance_metric=['VCpuUsageMeanVnf.VNF'])
        # metric inconsistent with the given group -> rejected
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          vnfpm_v2._check_performance_metric_or_group,
                          obj_type='Vnf',
                          metric_group=['VirtualisedComputeResource'],
                          performance_metric=['ByteIncomingVnfExtCp.VNF'])
    def test_create_error_1(self):
        """create() rejects this request with PMJobInvalidRequest."""
        # NOTE(review): presumably rejected because several objectInstanceIds
        # are combined with subObjectInstanceIds — confirm against the
        # controller's validation rules.
        _PmJobCriteriaV2 = {
            'performanceMetric': ['VCpuUsageMeanVnf.VNF'],
            'performanceMetricGroup': ['VirtualisedComputeResource'],
            'collectionPeriod': 10,
            'reportingPeriod': 11,
            'reportingBoundary': '2022-08-05T02:24:46Z',
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            "objectType": "Vnf",
            "objectInstanceIds": ["id_1", "id_2", "id_3"],
            "subObjectInstanceIds": ["sub_id_1", "sub_id_2"],
            "criteria": _PmJobCriteriaV2,
            "authentication": _SubscriptionAuthentication,
            "callbackUri": 'callbackuri',
        }
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          self.controller.create,
                          request=self.request, body=body)
    def test_create_error_2(self):
        """create() rejects criteria with neither metric nor metric group."""
        _PmJobCriteriaV2 = {
            'performanceMetric': [],
            'performanceMetricGroup': [],
            'collectionPeriod': 10,
            'reportingPeriod': 11,
            'reportingBoundary': '2022-08-05T02:24:46Z',
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            'objectType': 'Vnf',
            'objectInstanceIds': ['id_1'],
            'subObjectInstanceIds': ['sub_id_1', 'sub_id_2'],
            'criteria': _PmJobCriteriaV2,
            'callbackUri': 'callbackuri',
            'authentication': _SubscriptionAuthentication
        }
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          self.controller.create,
                          request=self.request, body=body)
    def test_create_error_3(self):
        """create() rejects an unknown performanceMetricGroup value."""
        _PmJobCriteriaV2 = {
            'performanceMetric': ['VCpuUsageMeanVnf.VNF'],
            'performanceMetricGroup': ['error-test'],
            'collectionPeriod': 10,
            'reportingPeriod': 11,
            'reportingBoundary': '2022-08-05T02:24:46Z',
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            'objectType': 'Vnf',
            'objectInstanceIds': ['id_1'],
            'subObjectInstanceIds': ['sub_id_1', 'sub_id_2'],
            'criteria': _PmJobCriteriaV2,
            'callbackUri': 'callbackuri',
            'authentication': _SubscriptionAuthentication
        }
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          self.controller.create,
                          request=self.request, body=body)
    def test_create_error_4(self):
        """create() rejects an unknown performanceMetric value."""
        _PmJobCriteriaV2 = {
            'performanceMetric': ['error.VNF'],
            'performanceMetricGroup': ['VirtualisedComputeResource'],
            'collectionPeriod': 10,
            'reportingPeriod': 11,
            'reportingBoundary': '2022-08-05T02:24:46Z',
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            'objectType': 'Vnf',
            'objectInstanceIds': ['id_1'],
            'subObjectInstanceIds': ['sub_id_1', 'sub_id_2'],
            'criteria': _PmJobCriteriaV2,
            'callbackUri': 'callbackuri',
            'authentication': _SubscriptionAuthentication
        }
        self.assertRaises(sol_ex.PMJobInvalidRequest,
                          self.controller.create,
                          request=self.request, body=body)
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_create_error_5(self, mock_inst):
        """create() fails when the target VNF instance is NOT_INSTANTIATED."""
        mock_inst.return_value = objects.VnfInstanceV2(
            id='dummy-vnfInstanceId-1', vnfdId='dummy-vnfdId-1',
            vnfProvider='dummy-vnfProvider-1',
            instantiationState='NOT_INSTANTIATED',
            vnfProductName='dummy-vnfProductName-1-1',
            vnfSoftwareVersion='1.0', vnfdVersion='1.0',
            vnfInstanceName='dummy-vnfInstanceName-1')
        _PmJobCriteriaV2 = {
            'performanceMetric': ['VCpuUsageMeanVnf.VNF'],
            'performanceMetricGroup': ['VirtualisedComputeResource'],
            'collectionPeriod': 10,
            'reportingPeriod': 11,
            'reportingBoundary': '2022-08-05T02:24:46Z',
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            'objectType': 'Vnf',
            'objectInstanceIds': ['id_1'],
            'subObjectInstanceIds': ['sub_id_1', 'sub_id_2'],
            'criteria': _PmJobCriteriaV2,
            'callbackUri': 'callbackuri',
            'authentication': _SubscriptionAuthentication
        }
        self.assertRaises(sol_ex.VnfInstanceIsNotInstantiated,
                          self.controller.create,
                          request=self.request, body=body)
    # NOTE(review): name is inconsistent with its siblings
    # (test_create_error_5 vs test_create_error6) — consider renaming.
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_create_error6(self, mock_inst):
        """create() rejects a malformed reportingBoundary datetime."""
        mock_inst.return_value = objects.VnfInstanceV2(
            id='dummy-vnfInstanceId-1', vnfdId='dummy-vnfdId-1',
            vnfProvider='dummy-vnfProvider-1',
            instantiationState='INSTANTIATED',
            vnfProductName='dummy-vnfProductName-1-1',
            vnfSoftwareVersion='1.0', vnfdVersion='1.0',
            vnfInstanceName='dummy-vnfInstanceName-1')
        _PmJobCriteriaV2 = {
            "performanceMetric": ["VCpuUsageMeanVnf.VNF"],
            "performanceMetricGroup": ["VirtualisedComputeResource"],
            "collectionPeriod": 10,
            "reportingPeriod": 11,
            "reportingBoundary": "invalid datetime format",
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            "objectType": "Vnf",
            "objectInstanceIds": ["id_1"],
            "subObjectInstanceIds": ["sub_id_1", "sub_id_2"],
            "criteria": _PmJobCriteriaV2,
            "authentication": _SubscriptionAuthentication,
            "callbackUri": "http://127.0.0.1:6789/notification",
            'metadata': {"metadata": "example"}
        }
        self.assertRaises(
            sol_ex.SolValidationError,
            self.controller.create, request=self.request, body=body)
    @mock.patch.object(objects.base.TackerPersistentObject, 'create')
    @mock.patch.object(pm_job_utils, 'test_notification')
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_create_201(self, mock_inst, mock_notifi, mock_create):
        """A valid PM job creation request returns 201 Created."""
        mock_inst.return_value = objects.VnfInstanceV2(
            id='dummy-vnfInstanceId-1', vnfdId='dummy-vnfdId-1',
            vnfProvider='dummy-vnfProvider-1',
            instantiationState='INSTANTIATED',
            vnfProductName='dummy-vnfProductName-1-1',
            vnfSoftwareVersion='1.0', vnfdVersion='1.0',
            vnfInstanceName='dummy-vnfInstanceName-1')
        mock_notifi.return_value = None
        mock_create.return_value = None
        _PmJobCriteriaV2 = {
            "performanceMetric": ["VCpuUsageMeanVnf.VNF"],
            "performanceMetricGroup": ["VirtualisedComputeResource"],
            "collectionPeriod": 10,
            "reportingPeriod": 11,
            "reportingBoundary": "2022-08-05T02:24:46Z",
        }
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            "objectType": "Vnf",
            "objectInstanceIds": ["id_1"],
            "subObjectInstanceIds": ["sub_id_1", "sub_id_2"],
            "criteria": _PmJobCriteriaV2,
            "authentication": _SubscriptionAuthentication,
            "callbackUri": "http://127.0.0.1:6789/notification",
            'metadata': {"metadata": "example"}
        }
        result = self.controller.create(request=self.request, body=body)
        self.assertEqual(201, result.status)
    @mock.patch.object(Pager, 'get_link')
    @mock.patch.object(BaseViewBuilder, 'detail_list')
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_all')
    @mock.patch.object(vnfpm_view.PmJobViewBuilder, 'parse_pager')
    @mock.patch.object(vnfpm_view.PmJobViewBuilder, 'parse_filter')
    @mock.patch.object(vnfpm_view.PmJobViewBuilder, 'parse_selector')
    def test_index(self, mock_parse_selector, mock_parse_filter,
                   mock_parse_pager,
                   mock_pm,
                   mock_detail_list,
                   mock_get_link):
        """Listing PM jobs (with filter and pagination) returns 200."""
        mock_parse_selector.return_value = 'selector'
        mock_parse_filter.return_value = 'filter'
        request = requests.Request()
        request.GET = {
            'filter': 'pm_job_id', 'nextpage_opaque_marker': 'marker'}
        request.url = 'url'
        page_size = CONF.v2_vnfm.vnf_instance_page_size
        pager = Pager(request.GET.get('nextpage_opaque_marker'),
                      request.url,
                      page_size)
        mock_parse_pager.return_value = pager
        mock_pm.return_value = [objects.PmJobV2(id='pm_job_1')]
        mock_detail_list.return_value = 1
        mock_get_link.return_value = 'url'
        result = self.controller.index(self.request)
        self.assertEqual(200, result.status)
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_show(self, mock_pm):
        """Showing a single PM job returns 200."""
        mock_pm.return_value = objects.PmJobV2(
            id='pm_job_1',
            objectInstanceIds=["id_1"],
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            )
        )
        result = self.controller.show(self.request, 'pm_job_1')
        self.assertEqual(200, result.status)
    @mock.patch.object(objects.base.TackerPersistentObject, 'update')
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    @mock.patch.object(pm_job_utils, 'test_notification')
    def test_update(self, mock_notifi, mock_pm, mock_update):
        """Updating callbackUri/authentication of a PM job returns 200."""
        mock_notifi.return_value = None
        mock_pm.return_value = objects.PmJobV2(id='pm_job_1')
        mock_update.return_value = None
        _SubscriptionAuthentication = {
            'authType': ['BASIC'],
            'paramsBasic': {
                'userName': 'test_name',
                'password': 'test_pwd'
            }
        }
        body = {
            'objectType': 'Vnf',
            'callbackUri': 'callbackuri',
            'authentication': _SubscriptionAuthentication
        }
        result = self.controller.update(request=self.request, id='id',
                                        body=body)
        self.assertEqual(200, result.status)
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_filter')
    @mock.patch.object(objects.base.TackerPersistentObject, 'get_by_id')
    def test_delete(self, mock_pm, mock_report):
        """Deleting a PM job (and its reports) returns 204 No Content."""
        mock_pm.return_value = objects.PmJobV2(id='pm_job_1')
        mock_report.return_value = [objects.PerformanceReportV2(
            id='report_id',
            jobId='pm_job_1')]
        result = self.controller.delete(self.request, 'pm_job_1')
        self.assertEqual(204, result.status)
    @mock.patch.object(PmJobViewBuilder, 'report_detail')
    @mock.patch.object(pm_job_utils, 'get_pm_report')
    def test_report_get(self, mock_get, mock_report):
        """Fetching an individual performance report returns 200."""
        mock_get.return_value = 'pm_report'
        mock_report.return_value = 'pm_report_resp'
        result = self.controller.report_get(
            self.request, 'pm_job_id', 'report_id')
        self.assertEqual(200, result.status)
    def test_allowed_content_types(self):
        """update accepts merge-patch content types; create does not."""
        result = self.controller.allowed_content_types('update')
        top = ['application/mergepatch+json', 'application/json', 'text/plain']
        self.assertEqual(top, result)
        result = self.controller.allowed_content_types('create')
        top = ['application/json', 'text/plain']
        self.assertEqual(top, result)
    def test_supported_api_version(self):
        """The PM job API supports exactly version 2.1.0."""
        result = self.controller.supported_api_versions('create')
        self.assertEqual(['2.1.0'], result)

View File

@ -0,0 +1,73 @@
# Copyright (C) 2021 Nippon Telegraph and Telephone Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tacker import context
from tacker.tests import base
from unittest import mock
from tacker.sol_refactored.common import config
from tacker.sol_refactored.controller.vnflcm_view import BaseViewBuilder
from tacker.sol_refactored.controller.vnfpm_view import PmJobViewBuilder
from tacker.sol_refactored import objects
CONF = config.CONF
class TestPmJobViewBuilder(base.BaseTestCase):
    """Unit tests for PmJobViewBuilder (v2 PM job response rendering)."""
    def setUp(self):
        super(TestPmJobViewBuilder, self).setUp()
        objects.register_all()
        self.context = context.get_admin_context()
        self.request = mock.Mock()
        self.request.context = self.context
        self.endpoint = CONF.v2_vnfm.endpoint
    @mock.patch.object(BaseViewBuilder, 'parse_filter')
    def test_parse_filter(self, mock_parse_filter):
        """parse_filter delegates to BaseViewBuilder.parse_filter."""
        mock_parse_filter.return_value = 1
        result = PmJobViewBuilder(self.endpoint).parse_filter('filter_param')
        self.assertEqual(1, result)
    @mock.patch.object(BaseViewBuilder, 'parse_pager')
    def test_parse_pager(self, mock_parse_pager):
        """parse_pager uses the configured vnfpm_pmjob_page_size."""
        mock_parse_pager.return_value = 1
        page_size = CONF.v2_vnfm.vnfpm_pmjob_page_size
        result = PmJobViewBuilder(self.endpoint).parse_pager(
            self.request, page_size)
        self.assertEqual(1, result)
    def test_detail(self):
        """detail() renders a PmJobV2 and preserves its id."""
        pm_job = objects.PmJobV2(
            id='pm_job_1',
            objectInstanceIds=["id_1"],
            authentication=objects.SubscriptionAuthentication(
                authType=["BASIC"],
                paramsBasic=objects.SubscriptionAuthentication_ParamsBasic(
                    userName='test',
                    password='test'
                ),
            )
        )
        result = PmJobViewBuilder(self.endpoint).detail(pm_job)
        self.assertEqual('pm_job_1', result.get('id'))
    @mock.patch.object(BaseViewBuilder, 'detail_list')
    def test_report_detail(self, mock_detail_list):
        # NOTE(review): despite its name, this test patches and calls
        # detail_list(), so report_detail() itself is never exercised.
        # Confirm intent and consider invoking
        # PmJobViewBuilder.report_detail instead.
        mock_detail_list.return_value = 1
        result = PmJobViewBuilder(self.endpoint).detail_list(
            'pm_jobs', 'filters', 'selector', 'pager')
        self.assertEqual(1, result)

View File

@ -21,6 +21,7 @@ from tacker import context
from tacker.sol_refactored.api import api_version
from tacker.sol_refactored.common import config
from tacker.sol_refactored.common import http_client
from tacker.sol_refactored.common import pm_job_utils
from tacker.sol_refactored.common import subscription_utils as subsc_utils
from tacker.sol_refactored.common import vnfd_utils
from tacker.sol_refactored.nfvo import local_nfvo
@ -419,3 +420,22 @@ class TestNfvoClient(base.BaseTestCase):
self.context, lcmocc, inst, 'http://127.0.0.1:9890')
self.assertEqual(1, mock_recv.call_count)
self.assertEqual(1, mock_send.call_count)
@mock.patch.object(pm_job_utils, 'send_notification')
@mock.patch.object(pm_job_utils, 'make_pm_notif_data')
def test_send_pm_job_notification(self, mock_notif, mock_send):
mock_notif.return_value = 'mock_notif'
mock_send.return_value = None
entries = {
'objectType': "VNF",
'objectInstanceId': "instance_id_1",
'subObjectInstanceId': "subObjectInstanceId_1"
}
report = objects.PerformanceReportV2(
id=uuidutils.generate_uuid(),
jobId='pm_job_id',
entries=[objects.VnfPmReportV2_Entries.from_dict(entries)]
)
self.nfvo_client.send_pm_job_notification(
report, "pm_job", 'timestamp', self.nfvo_client.endpoint
)

View File

@ -0,0 +1,112 @@
# Copyright (C) 2022 Fujitsu
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Sample Alarm resource (ETSI NFV-SOL 003 v3.3.1 VNF Fault Management)
# used as fixture data by the v1 VNF FM controller unit tests.
alarm_example = {
    "id": "78a39661-60a8-4824-b989-88c1b0c3534a",
    "managedObjectId": "c61314d0-f583-4ab3-a457-46426bce02d3",
    "rootCauseFaultyResource": {
        "faultyResource": {
            "vimConnectionId": "0d57e928-86a4-4445-a4bd-1634edae73f3",
            "resourceId": "4e6ccbe1-38ec-4b1b-a278-64de09ba01b3",
            "vimLevelResourceType": "OS::Nova::Server"
        },
        "faultyResourceType": "COMPUTE"
    },
    "alarmRaisedTime": "2021-09-06T10:21:03Z",
    "alarmChangedTime": "2021-09-06T10:21:03Z",
    "alarmClearedTime": "2021-09-06T10:21:03Z",
    "alarmAcknowledgedTime": "2021-09-06T10:21:03Z",
    "ackState": "UNACKNOWLEDGED",
    "perceivedSeverity": "WARNING",
    "eventTime": "2021-09-06T10:21:03Z",
    "eventType": "EQUIPMENT_ALARM",
    "faultType": "Fault Type",
    "probableCause": "The server cannot be connected.",
    "isRootCause": False,
    "correlatedAlarmIds": [
        "c88b624e-e997-4b17-b674-10ca2bab62e0",
        "c16d41fd-12e2-49a6-bb17-72faf702353f"
    ],
    "faultDetails": [
        "Fault",
        "Details"
    ],
    "_links": {
        "self": {
            "href": "/vnffm/v1/alarms/78a39661-60a8-4824-b989-88c1b0c3534a"
        },
        "objectInstance": {
            "href": "/vnflcm/v1/vnf_instances/"
                    "0e5f3086-4e79-47ed-a694-54c29155fa26"
        }
    }
}
# Sample FmSubscription resource (ETSI NFV-SOL 003 v3.3.1 VNF Fault
# Management) used as fixture data by the v1 VNF FM controller unit tests.
fm_subsc_example = {
    "id": "78a39661-60a8-4824-b989-88c1b0c3534a",
    "filter": {
        "vnfInstanceSubscriptionFilter": {
            "vnfdIds": [
                "dummy-vnfdId-1"
            ],
            "vnfProductsFromProviders": [
                {
                    "vnfProvider": "dummy-vnfProvider-1",
                    "vnfProducts": [
                        {
                            "vnfProductName": "dummy-vnfProductName-1-1",
                            "versions": [
                                {
                                    "vnfSoftwareVersion": '1.0',
                                    "vnfdVersions": ['1.0', '2.0']
                                }
                            ]
                        }
                    ]
                }
            ],
            "vnfInstanceIds": [
                "dummy-vnfInstanceId-1"
            ],
            "vnfInstanceNames": [
                "dummy-vnfInstanceName-1"
            ]
        },
        "notificationTypes": [
            "AlarmNotification",
            "AlarmClearedNotification"
        ],
        "faultyResourceTypes": [
            "COMPUTE"
        ],
        "perceivedSeverities": [
            "WARNING"
        ],
        "eventTypes": [
            "EQUIPMENT_ALARM"
        ],
        "probableCauses": [
            "The server cannot be connected."
        ]
    },
    "callbackUri": "/nfvo/notify/alarm",
    "_links": {
        "self": {
            "href": "/vnffm/v1/subscriptions/"
                    "78a39661-60a8-4824-b989-88c1b0c3534a"
        }
    }
}

View File

@ -21,3 +21,4 @@ python-barbicanclient>=4.5.2 # Apache-2.0
python-blazarclient>=1.0.1 # Apache-2.0
requests-mock>=1.2.0 # Apache-2.0
PyMySQL>=0.10.1 # MIT
freezegun>=1.2.2 # Apache-2.0

View File

@ -0,0 +1,14 @@
#!/bin/bash -xe
# Set up a fake Prometheus server (External Monitoring Tool stub) used by
# the SOL Kubernetes v2 functional tests. Builds the test image from the
# bundled sample and runs it with the rule/src directories bind-mounted.
#
SAMPLE_DIR=/opt/stack/tacker/tacker/tests/functional/sol_kubernetes_v2/samples

cd "${SAMPLE_DIR}"
sudo unzip tacker-monitoring-test.zip
cd ./tacker-monitoring-test

# Build the monitoring-test image and start it detached; expose the fake
# Prometheus API (55555) and an SSH port (50022).
sudo docker build -t tacker-monitoring-test .
sudo docker run -v ${PWD}/src:/work/src -v ${PWD}/rules:/etc/prometheus/rules \
    -p 55555:55555 -p 50022:22 -e TEST_REMOTE_URI="http://0.0.0.0" -d \
    -it tacker-monitoring-test