USM proxy for major release upload/delete

Create a software API proxy for major release upload/delete. The proxy
manages the dc vault (/opt/dc-vault/software) for major releases when a
major release is uploaded or deleted. This API proxy service listens on
the USM endpoint for the SystemController region.
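
Example of exercising the proxy (a sketch; release file names and the
release id are placeholders, flags as used in the TCs below):

    software --os-region-name SystemController upload <rel>.iso <rel>.sig
    software --os-region-name SystemController delete <release-id>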

Story: 201676
Task: 50683

TCs:
    Upload a major/patching release with --os-region-name SystemController;
    observe that the iso and patch files are stored in
    /opt/dc-vault/software/<rel>/
    Delete a major/patching release with --os-region-name SystemController;
    observe that the iso and patch files in dc-vault are also deleted.
    Delete a major release without --os-region-name SystemController; the
    iso and patch files remain in dc-vault. Any outstanding iso/sig file
    pair and patch files not tied to a major release currently managed by
    USM are subsequently deleted (cleaned up) when a POST/DELETE request
    goes through the USM endpoint for the SystemController region.
    Passed the above commands without --os-region-name SystemController.
    Passed repeated upload/delete with --os-region-name SystemController,
    with and without --local.

Depends-on: https://review.opendev.org/c/starlingx/update/+/925128

Change-Id: Ib7fa30d4a87eef65a10740a9f4785a2cc8d64ebf
Signed-off-by: Bin Qian <Bin.Qian@windriver.com>
Bin Qian 2024-07-24 17:24:03 +00:00
parent 945af3ff49
commit abd2c2809b
10 changed files with 640 additions and 21 deletions


@@ -37,6 +37,7 @@ VIRTUAL_MASTER_CLOUD = "SystemController"
SW_UPDATE_DEFAULT_TITLE = "all clouds default"
ANSIBLE_OVERRIDES_PATH = "/opt/dc-vault/ansible"
LOAD_VAULT_DIR = "/opt/dc-vault/loads"
SOFTWARE_VAULT_DIR = "/opt/dc-vault/software"
DEPLOY_DIR = "/opt/platform/deploy"
USER_HEADER_VALUE = "distcloud"
@@ -101,6 +102,7 @@ ENDPOINT_TYPE_DC_CERT = "dc-cert"
ENDPOINT_TYPE_FIRMWARE = "firmware"
ENDPOINT_TYPE_KUBERNETES = "kubernetes"
ENDPOINT_TYPE_KUBE_ROOTCA = "kube-rootca"
ENDPOINT_TYPE_USM = "usm"
# All endpoint types
ENDPOINT_TYPES_LIST = [


@@ -24,6 +24,7 @@ from dcorch.api.proxy.apps.controller import IdentityAPIController
from dcorch.api.proxy.apps.controller import NeutronAPIController
from dcorch.api.proxy.apps.controller import OrchAPIController
from dcorch.api.proxy.apps.controller import SysinvAPIController
from dcorch.api.proxy.apps.controller import USMAPIController
from dcorch.api.proxy.apps.controller import VersionController
from dcorch.api.proxy.apps.dispatcher import APIDispatcher
from dcorch.api.proxy.apps.patch import PatchAPIController
@@ -45,6 +46,7 @@ class Acceptor(Router):
consts.ENDPOINT_TYPE_VOLUME: self._default_dispatcher,
consts.ENDPOINT_TYPE_NETWORK: self._default_dispatcher,
dccommon_consts.ENDPOINT_TYPE_IDENTITY: self._default_dispatcher,
dccommon_consts.ENDPOINT_TYPE_SOFTWARE: self._default_dispatcher,
}
if CONF.type in self.forwarder_map:
forwarder = self.forwarder_map[CONF.type]
@@ -58,6 +60,7 @@ class Acceptor(Router):
consts.ENDPOINT_TYPE_NETWORK: self.add_network_routes,
dccommon_consts.ENDPOINT_TYPE_PATCHING: self.add_patch_routes,
dccommon_consts.ENDPOINT_TYPE_IDENTITY: self.add_identity_routes,
dccommon_consts.ENDPOINT_TYPE_SOFTWARE: self.add_usm_routes,
}
self._conf = conf
mapper = routes.Mapper()
@@ -127,6 +130,12 @@ class Acceptor(Router):
for key, value in proxy_consts.IDENTITY_PATH_MAP.items():
self._add_resource(mapper, api_controller, value, key, CONF.type)
def add_usm_routes(self, app, conf, mapper):
api_controller = USMAPIController(app, conf)
for key, value in proxy_consts.USM_PATH_MAP.items():
self._add_resource(mapper, api_controller, value, key, CONF.type)
class VersionAcceptor(Router):
def __init__(self, app, conf):


@@ -16,8 +16,11 @@
import grp
import json
import os
import pathlib
import pwd
import shutil
import tempfile
import threading
from eventlet.green import subprocess
from oslo_config import cfg
@@ -28,6 +31,7 @@ import psutil
import tsconfig.tsconfig as tsc
import webob.dec
import webob.exc
from webob import Response
from dccommon import consts as dccommon_consts
from dccommon.drivers.openstack.sdk_platform import OpenStackDriver
@@ -42,9 +46,11 @@ from dcorch.api.proxy.common import utils as proxy_utils
from dcorch.common import consts
from dcorch.common import context as k_context
from dcorch.common import exceptions as exception
from dcorch.common import usm_util
from dcorch.common import utils
from dcorch.rpc import client as rpc_client
LOG = logging.getLogger(__name__)
controller_opts = [
@@ -890,6 +896,240 @@ class SysinvAPIController(APIController):
raise webob.exc.HTTPNotFound(explanation=str(e))
class InsufficientDiskspace(Exception):
pass
class LocalStorage(object):
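    # Per-request state holder backed by threading.local(), so tmp_dir,
    # upload_files and my_copy are not shared between concurrent requests
    # handled by the same controller instance.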
def __init__(self):
self._storage = threading.local()
def get_value(self, key):
if hasattr(self._storage, key):
return getattr(self._storage, key)
else:
return None
def set_value(self, key, value):
setattr(self._storage, key, value)
def void_value(self, key):
if hasattr(self._storage, key):
delattr(self._storage, key)
class USMAPIController(APIController):
ENDPOINT_TYPE = dccommon_consts.ENDPOINT_TYPE_SOFTWARE
OK_STATUS_CODE = [
webob.exc.HTTPOk.code,
webob.exc.HTTPAccepted.code,
webob.exc.HTTPNoContent.code,
]
@property
def tmp_dir(self):
return self._local_storage.get_value("tmp_dir")
@tmp_dir.setter
def tmp_dir(self, value):
self._local_storage.set_value("tmp_dir", value)
@property
def my_copy(self):
return self._local_storage.get_value("my_copy")
@my_copy.setter
def my_copy(self, value):
self._local_storage.set_value("my_copy", value)
@property
def upload_files(self):
return self._local_storage.get_value("upload_files")
@upload_files.setter
def upload_files(self, value):
self._local_storage.set_value("upload_files", value)
def __init__(self, app, conf):
super(USMAPIController, self).__init__(app, conf)
self.response_hander_map = {self.ENDPOINT_TYPE: self._process_response}
self._local_storage = LocalStorage()
self.upload_files = []
self.my_copy = False
self.tmp_dir = None
self.software_vault = dccommon_consts.SOFTWARE_VAULT_DIR
@webob.dec.wsgify(RequestClass=Request)
def __call__(self, req):
if CONF.show_request:
self.print_request(req)
environ = req.environ
self.upload_files = []
content_type = req.content_type
new_request = req
new_request.body = req.body
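        # Two upload paths: a text/plain body carries a JSON list of file
        # paths already present on this host (--local), while any other
        # content type carries the file contents themselves (multipart
        # upload). In the latter case the files are first staged under
        # /scratch and the request is rewritten into the --local form
        # before being forwarded to the USM service.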
if content_type == "text/plain":
# --local
self.upload_files = list(json.loads(req.body))
self.my_copy = False
else:
LOG.info("save uploaded files to local storage")
# upload. save files to scratch then perform a --local
request_data = list(req.POST.items())
uploaded_files = sorted(set(request_data))
self._create_temp_storage()
# Save all uploaded files to tmp_dir
for file_item in uploaded_files:
try:
filename = self._save_upload_file(file_item[1])
except InsufficientDiskspace as e:
self._cleanup_temp_storage()
ret = {"info": "", "warning": "", "error": str(e)}
response = Response(body=json.dumps(ret), status=500)
return response
self.upload_files.append(filename)
new_request.content_type = "text/plain"
new_request.body = json.dumps(self.upload_files).encode(new_request.charset)
self.my_copy = True
application = self.process_request(new_request)
response = req.get_response(application)
resp = self.process_response(environ, new_request, response)
self._cleanup_temp_storage()
return resp
def _cleanup_temp_storage(self):
if self.tmp_dir:
shutil.rmtree(self.tmp_dir, ignore_errors=True)
self.tmp_dir = None
def _save_upload_file(self, file_item):
file_name = file_item.filename
target_dir = self.tmp_dir
file_item.file.seek(0, os.SEEK_END)
file_size = file_item.file.tell()
avail_space = shutil.disk_usage(target_dir).free
if file_size > avail_space:
LOG.error(
"Not enough space to save file %s in %s \n "
+ "Available %s bytes. File size %s",
file_name,
target_dir,
avail_space,
file_size,
)
raise InsufficientDiskspace(f"Insufficient disk space in {self.tmp_dir}")
target_file = os.path.join(target_dir, os.path.basename(file_name))
with open(target_file, "wb") as destination_file:
destination_file.write(file_item.value)
return target_file
def _process_response(self, environ, request, response):
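        # On a successful POST to the release resource, mirror every
        # uploaded .iso/.patch file into
        # /opt/dc-vault/software/<major release>/; then, for any successful
        # release request (POST or DELETE), compare the vault contents
        # against the releases USM still reports and remove vault
        # directories for versions that are no longer present.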
def is_usm_software(fn):
return os.path.splitext(fn)[-1] in [".iso", ".patch"]
try:
resource_type = self._get_resource_type_from_environ(environ)
operation_type = proxy_utils.get_operation_type(environ)
if self.get_status_code(response) in self.OK_STATUS_CODE:
LOG.info("resource type %s" % resource_type)
if resource_type == consts.RESOURCE_TYPE_USM_RELEASE:
if operation_type == consts.OPERATION_TYPE_POST:
body = response.body
if isinstance(body, bytes):
body = body.decode()
files = usm_util.parse_upload(body)
releases = [f for f in files if is_usm_software(f["filename"])]
for release in releases:
sw_version = usm_util.get_major_release_version(
release["sw_release"]
)
self._save_load_to_vault(sw_version)
sw_versions = self._get_major_releases(environ, request)
LOG.info("current available software versions %s" % sw_versions)
if sw_versions:
dcvault_versions = self._get_version_from_dcvault()
LOG.info("software in dcvault %s" % dcvault_versions)
self._audit_dcvault(sw_versions, dcvault_versions)
return response
finally:
proxy_utils.cleanup(environ)
def _get_major_releases(self, environ, request):
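        # Reissue the incoming request environ as a GET /v1/release/ to the
        # USM service and collect the distinct major release versions
        # referenced by the returned release ids.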
new_request = request
new_request.body = None
new_environ = environ
new_environ["REQUEST_METHOD"] = "GET"
new_environ["PATH_INFO"] = "/v1/release/"
new_request = Request(new_environ)
application = self.process_request(new_request)
resp = new_request.get_response(application)
if self.get_status_code(resp) not in self.OK_STATUS_CODE:
# can't retrieve software list at the moment
return None
data = json.loads(resp.body)
sw_versions = []
for d in data:
sw_version = usm_util.get_component_and_versions(d["release_id"])[2]
if sw_version and sw_version not in sw_versions:
sw_versions.append(sw_version)
return sw_versions
def _get_version_from_dcvault(self):
if os.path.exists(self.software_vault):
dirs = os.listdir(self.software_vault)
return dirs
return []
    def _audit_dcvault(self, sw_versions, dcvault_versions):
        for dcvault_ver in dcvault_versions:
if dcvault_ver not in sw_versions:
self._remove_load_from_vault(dcvault_ver)
def _create_temp_storage(self):
self.tmp_dir = tempfile.mkdtemp(prefix="upload", dir="/scratch")
LOG.info("created %s" % self.tmp_dir)
return self.tmp_dir
def _save_load_to_vault(self, sw_version):
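        # Move the staged files into /opt/dc-vault/software/<sw_version>/.
        # For --local requests (my_copy is False) the caller's files are
        # first copied into a temp dir so the originals are left untouched;
        # proxy-staged uploads are already in tmp_dir and are moved directly.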
versioned_vault = os.path.join(self.software_vault, sw_version)
pathlib.Path(versioned_vault).mkdir(parents=True, exist_ok=True)
if not self.my_copy:
self._create_temp_storage()
for upload_file in self.upload_files:
base_name = os.path.basename(upload_file)
target_file = os.path.join(self.tmp_dir, base_name)
shutil.copy(upload_file, target_file)
# Move the files to the final location
for upload_file in self.upload_files:
base_name = os.path.basename(upload_file)
target_file = os.path.join(versioned_vault, base_name)
src_file = os.path.join(self.tmp_dir, base_name)
shutil.move(src_file, target_file)
LOG.info("Release %s (%s) saved to vault." % (self.upload_files, sw_version))
def _remove_load_from_vault(self, sw_version):
versioned_vault = os.path.join(self.software_vault, sw_version)
if os.path.isdir(versioned_vault):
shutil.rmtree(versioned_vault)
LOG.info("Load (%s) removed from vault." % sw_version)
class IdentityAPIController(APIController):
ENDPOINT_TYPE = dccommon_consts.ENDPOINT_TYPE_IDENTITY


@@ -78,6 +78,8 @@ USER_PATHS = ["/v1/iuser/{uuid}"]
LOAD_PATHS = ["/v1/loads/import_load", "/v1/loads/{id}"]
RELEASE_PATHS = ["//v1/release", "//v1/release/{rel_id}"]
DEVICE_IMAGE_PATHS = ["/v1/device_images", "/v1/device_images/{uuid}"]
SYSINV_PATH_MAP = {
@@ -87,6 +89,10 @@ SYSINV_PATH_MAP = {
consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: DEVICE_IMAGE_PATHS,
}
USM_PATH_MAP = {
consts.RESOURCE_TYPE_USM_RELEASE: RELEASE_PATHS,
}
LOAD_FILES_STAGING_DIR = "/scratch/tmp_load"
IMPORT_LOAD_FILES = ["path_to_iso", "path_to_sig"]
IMPORTED_LOAD_MAX_COUNT = 1
@@ -359,9 +365,13 @@ ROUTE_METHOD_MAP = {
consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: ["PUT", "DELETE"],
consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS: ["DELETE"],
},
dccommon_consts.ENDPOINT_TYPE_SOFTWARE: {
consts.RESOURCE_TYPE_USM_RELEASE: ["PUT", "POST", "DELETE"],
},
}
LOAD_VAULT_DIR = "/opt/dc-vault/loads"
LOAD_VAULT_TMP_DIR = "/opt/dc-vault/loads/load_tmpdir"
ENDPOINT_TYPE_PATCHING_TMPDIR = "/scratch/patch-api-proxy-tmpdir"
ENDPOINT_TYPE_PLATFORM_TMPDIR = "/scratch/platform-api-proxy-tmpdir"
ENDPOINT_TYPE_USM_TMPDIR = "/scratch/software-upload-tmpdir"


@@ -107,6 +107,8 @@ def main():
make_tempdir(constants.ENDPOINT_TYPE_PATCHING_TMPDIR)
elif CONF.type == consts.ENDPOINT_TYPE_PLATFORM:
make_tempdir(constants.ENDPOINT_TYPE_PLATFORM_TMPDIR)
elif CONF.type == consts.ENDPOINT_TYPE_USM:
make_tempdir(constants.ENDPOINT_TYPE_USM_TMPDIR)
service = wsgi.Server(CONF, CONF.prog, application, host, port)


@@ -256,24 +256,6 @@ fernet_opts = [
)
]
usm_proxy_opts = [
cfg.StrOpt(
"bind_host",
default="0.0.0.0",
help="IP address for API proxy to listen for incoming connections",
),
cfg.IntOpt("bind_port", default=25497, help="listen port for API proxy"),
cfg.StrOpt(
"remote_host",
default="0.0.0.0",
help="The remote host address used for outgoing API proxy connection",
),
cfg.IntOpt(
"remote_port",
default=5497,
help="The remote port used for outgoing API proxy connection",
),
]
scheduler_opt_group = cfg.OptGroup(
"scheduler", title="Scheduler options for periodic job"
@@ -285,8 +267,6 @@ default_quota_group = cfg.OptGroup(
# The group stores the pecan configurations.
pecan_group = cfg.OptGroup(name="pecan", title="Pecan options")
usm_group = cfg.OptGroup(name="usm", title="USM proxy options")
cache_opt_group = cfg.OptGroup(name="cache", title="OpenStack Admin Credentials")
endpoint_cache_opt_group = cfg.OptGroup(
@@ -310,7 +290,6 @@ def list_opts():
yield scheduler_opt_group.name, scheduler_opts
yield pecan_group.name, pecan_opts
yield fernet_opt_group.name, fernet_opts
yield usm_group.name, usm_proxy_opts
yield None, global_opts
yield None, common_opts


@@ -35,6 +35,7 @@ RESOURCE_TYPE_SYSINV_CERTIFICATE = "certificates"
RESOURCE_TYPE_SYSINV_USER = "iuser"
RESOURCE_TYPE_SYSINV_FERNET_REPO = "fernet_repo"
RESOURCE_TYPE_SYSINV_LOAD = "loads"
RESOURCE_TYPE_USM_RELEASE = "release"
RESOURCE_TYPE_SYSINV_DEVICE_IMAGE = "device_image"
# Compute Resources


@@ -0,0 +1,45 @@
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import json
from oslo_log import log as logging
# pylint: disable=unused-import
from software.utils import get_component_and_versions # noqa: F401
# pylint: disable=unused-import
from software.utils import get_major_release_version # noqa: F401
LOG = logging.getLogger(__name__)
__ALL__ = ("get_major_release_version", "get_component_and_versions", "parse_upload")
def parse_upload(resp):
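    # 'resp' is the USM upload response body: a JSON document whose
    # "upload_info" entry is a list of single-key dicts mapping each
    # uploaded file name to its metadata. Illustrative shape only (field
    # values are hypothetical):
    #   {"upload_info": [{"foo.iso": {"id": "starlingx-24.09.0",
    #                                 "sw_release": "24.09.0"}}]}
    # Only entries carrying both "id" and "sw_release" are returned.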
files = []
resp_str = str(resp)
try:
data = json.loads(resp_str)
except json.JSONDecodeError:
LOG.error("invalid json format. %s" % resp_str)
return files
upload_info = data.get("upload_info")
if upload_info is None:
return files
for upload_file in upload_info:
for filename in upload_file:
filedata = upload_file[filename]
sw_release = filedata.get("sw_release")
release_id = filedata.get("id")
if sw_release and release_id:
files.append(
{
"filename": filename,
"release_id": release_id,
"sw_release": sw_release,
}
)
return files


@@ -0,0 +1,329 @@
#!/bin/sh
# OpenStack DC Orchestrator USM Api Proxy Service (dcorch-usm-api-proxy)
#
# Description:
# Manages an OpenStack DC Orchestrator USM Api Proxy Service (dcorch-usm-api-proxy)
# process as an HA resource
#
# Copyright (c) 2018,2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
#
# See usage() function below for more details ...
#
# OCF instance parameters:
# OCF_RESKEY_binary
# OCF_RESKEY_config
# OCF_RESKEY_user
# OCF_RESKEY_pid
# OCF_RESKEY_additional_parameters
#######################################################################
# Initialization:
: ${OCF_FUNCTIONS_DIR=${OCF_ROOT}/lib/heartbeat}
. ${OCF_FUNCTIONS_DIR}/ocf-shellfuncs
#######################################################################
# Fill in some defaults if no values are specified
OCF_RESKEY_binary_default="/usr/bin/dcorch-api-proxy"
OCF_RESKEY_config_default="/etc/dcorch/dcorch.conf"
OCF_RESKEY_user_default="root"
OCF_RESKEY_pid_default="$HA_RSCTMP/$OCF_RESOURCE_INSTANCE.pid"
: ${OCF_RESKEY_binary=${OCF_RESKEY_binary_default}}
: ${OCF_RESKEY_config=${OCF_RESKEY_config_default}}
: ${OCF_RESKEY_user=${OCF_RESKEY_user_default}}
: ${OCF_RESKEY_pid=${OCF_RESKEY_pid_default}}
#######################################################################
usage() {
cat <<UEND
usage: $0 (start|stop|validate-all|meta-data|status|monitor)
$0 manages an OpenStack DC Orchestrator USM Api Proxy service (dcorch-usm-api-proxy) process as an HA resource
The 'start' operation starts the dcorch-usm-api-proxy service.
The 'stop' operation stops the dcorch-usm-api-proxy service.
The 'validate-all' operation reports whether the parameters are valid
The 'meta-data' operation reports this RA's meta-data information
The 'status' operation reports whether the dcorch-usm-api-proxy service is running
The 'monitor' operation reports whether the dcorch-usm-api-proxy service seems to be working
UEND
}
meta_data() {
cat <<END
<?xml version="1.0"?>
<!DOCTYPE resource-agent SYSTEM "ra-api-1.dtd">
<resource-agent name="dcorch-usm-api-proxy">
<version>1.0</version>
<longdesc lang="en">
Resource agent for the DC Orchestrator USM Api proxy service (dcorch-usm-api-proxy)
</longdesc>
<shortdesc lang="en">Manages the OpenStack DC Orchestrator USM Api Proxy Service (dcorch-usm-api-proxy)</shortdesc>
<parameters>
<parameter name="binary" unique="0" required="0">
<longdesc lang="en">
Location of the DC Orchestrator USM Api proxy server binary (dcorch-usm-api-proxy)
</longdesc>
<shortdesc lang="en">DC Orchestrator USM Api proxy server binary (dcorch-usm-api-proxy)</shortdesc>
<content type="string" default="${OCF_RESKEY_binary_default}" />
</parameter>
<parameter name="config" unique="0" required="0">
<longdesc lang="en">
Location of the DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) configuration file
</longdesc>
<shortdesc lang="en">DC Orchestrator USM Api proxy (dcorch-usm-api-proxy registry) config file</shortdesc>
<content type="string" default="${OCF_RESKEY_config_default}" />
</parameter>
<parameter name="user" unique="0" required="0">
<longdesc lang="en">
User running DC Orchestrator USM Api proxy (dcorch-usm-api-proxy)
</longdesc>
<shortdesc lang="en">DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) user</shortdesc>
<content type="string" default="${OCF_RESKEY_user_default}" />
</parameter>
<parameter name="pid" unique="0" required="0">
<longdesc lang="en">
The pid file to use for this DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) instance
</longdesc>
<shortdesc lang="en">DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) pid file</shortdesc>
<content type="string" default="${OCF_RESKEY_pid_default}" />
</parameter>
<parameter name="additional_parameters" unique="0" required="0">
<longdesc lang="en">
Additional parameters to pass on to this DC Orchestrator USM API (dcorch-usm-api-proxy)
</longdesc>
<shortdesc lang="en">Additional parameters for dcorch-usm-api-proxy</shortdesc>
<content type="string" />
</parameter>
</parameters>
<actions>
<action name="start" timeout="20" />
<action name="stop" timeout="20" />
<action name="status" timeout="20" />
<action name="monitor" timeout="10" interval="5" />
<action name="validate-all" timeout="5" />
<action name="meta-data" timeout="5" />
</actions>
</resource-agent>
END
}
#######################################################################
# Functions invoked by resource manager actions
dcorch_usm_api_proxy_validate() {
local rc
check_binary $OCF_RESKEY_binary
check_binary curl
check_binary tr
check_binary grep
check_binary cut
check_binary head
# A config file on shared storage that is not available
# during probes is OK.
if [ ! -f $OCF_RESKEY_config ]; then
if ! ocf_is_probe; then
ocf_log err "Config $OCF_RESKEY_config doesn't exist"
return $OCF_ERR_INSTALLED
fi
ocf_log_warn "Config $OCF_RESKEY_config not available during a probe"
fi
getent passwd $OCF_RESKEY_user >/dev/null 2>&1
rc=$?
if [ $rc -ne 0 ]; then
ocf_log err "User $OCF_RESKEY_user doesn't exist"
return $OCF_ERR_INSTALLED
fi
true
}
dcorch_usm_api_proxy_status() {
local pid
local rc
if [ ! -f $OCF_RESKEY_pid ]; then
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) is not running"
return $OCF_NOT_RUNNING
else
pid=`cat $OCF_RESKEY_pid`
fi
ocf_run -warn kill -s 0 $pid
rc=$?
if [ $rc -eq 0 ]; then
return $OCF_SUCCESS
else
ocf_log info "Old PID file found, but DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) is not running"
rm -f $OCF_RESKEY_pid
return $OCF_NOT_RUNNING
fi
}
dcorch_usm_api_proxy_monitor() {
local rc
dcorch_usm_api_proxy_status
rc=$?
# If status returned anything but success, return that immediately
if [ $rc -ne $OCF_SUCCESS ]; then
return $rc
fi
    # Further verify the service availability.
ocf_log debug "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) monitor succeeded"
return $OCF_SUCCESS
}
dcorch_usm_api_proxy_start() {
local rc
dcorch_usm_api_proxy_status
rc=$?
if [ $rc -eq $OCF_SUCCESS ]; then
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) already running"
return $OCF_SUCCESS
fi
    # Change the working dir to /, to be sure it's accessible
cd /
# run the actual dcorch-usm-api-proxy daemon. Don't use ocf_run as we're sending the tool's output
# straight to /dev/null anyway and using ocf_run would break stdout-redirection here.
ocf_log info "su ${OCF_RESKEY_user} -s /bin/sh -c \"${OCF_RESKEY_binary} --config-file=$OCF_RESKEY_config --type usm \
$OCF_RESKEY_additional_parameters\"' >> /dev/null 2>&1 & echo $!' > $OCF_RESKEY_pid"
su ${OCF_RESKEY_user} -s /bin/sh -c "${OCF_RESKEY_binary} --config-file=$OCF_RESKEY_config --type usm \
$OCF_RESKEY_additional_parameters"' >> /dev/null 2>&1 & echo $!' > $OCF_RESKEY_pid
# Spin waiting for the server to come up.
# Let the CRM/LRM time us out if required
while true; do
dcorch_usm_api_proxy_monitor
rc=$?
[ $rc -eq $OCF_SUCCESS ] && break
if [ $rc -ne $OCF_NOT_RUNNING ]; then
ocf_log err "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) start failed"
exit $OCF_ERR_GENERIC
fi
sleep 1
done
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) started"
return $OCF_SUCCESS
}
dcorch_usm_api_proxy_confirm_stop() {
local my_bin
local my_processes
my_binary=`which ${OCF_RESKEY_binary}`
my_type="usm"
my_processes=`pgrep -f "^(python|/usr/bin/python|/usr/bin/python3) ${my_binary} .*--type ${my_type}([^\w-]|$)"`
if [ -n "${my_processes}" ]
then
ocf_log info "About to SIGKILL the following: ${my_processes}"
        # replace the new line with a space in the process list
kill -9 `echo "${my_processes}" | tr '\n' ' '`
fi
}
dcorch_usm_api_proxy_stop() {
local rc
local pid
dcorch_usm_api_proxy_status
rc=$?
if [ $rc -eq $OCF_NOT_RUNNING ]; then
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) already stopped"
dcorch_usm_api_proxy_confirm_stop
return $OCF_SUCCESS
fi
# Try SIGTERM
pid=`cat $OCF_RESKEY_pid`
ocf_run kill -s TERM $pid
rc=$?
if [ $rc -ne 0 ]; then
ocf_log err "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) couldn't be stopped"
dcorch_usm_api_proxy_confirm_stop
exit $OCF_ERR_GENERIC
fi
# stop waiting
shutdown_timeout=15
if [ -n "$OCF_RESKEY_CRM_meta_timeout" ]; then
shutdown_timeout=$((($OCF_RESKEY_CRM_meta_timeout/1000)-5))
fi
count=0
while [ $count -lt $shutdown_timeout ]; do
dcorch_usm_api_proxy_status
rc=$?
if [ $rc -eq $OCF_NOT_RUNNING ]; then
break
fi
count=`expr $count + 1`
sleep 1
ocf_log debug "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) still hasn't stopped yet. Waiting ..."
done
dcorch_usm_api_proxy_status
rc=$?
if [ $rc -ne $OCF_NOT_RUNNING ]; then
# SIGTERM didn't help either, try SIGKILL
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) failed to stop after ${shutdown_timeout}s \
using SIGTERM. Trying SIGKILL ..."
ocf_run kill -s KILL $pid
fi
dcorch_usm_api_proxy_confirm_stop
ocf_log info "DC Orchestrator USM Api proxy (dcorch-usm-api-proxy) stopped"
rm -f $OCF_RESKEY_pid
return $OCF_SUCCESS
}
#######################################################################
case "$1" in
meta-data) meta_data
exit $OCF_SUCCESS;;
usage|help) usage
exit $OCF_SUCCESS;;
esac
# Anything except meta-data and help must pass validation
dcorch_usm_api_proxy_validate || exit $?
# What kind of method was invoked?
case "$1" in
start) dcorch_usm_api_proxy_start;;
stop) dcorch_usm_api_proxy_stop;;
status) dcorch_usm_api_proxy_status;;
monitor) dcorch_usm_api_proxy_monitor;;
validate-all) ;;
*) usage
exit $OCF_ERR_UNIMPLEMENTED;;
esac


@@ -16,6 +16,7 @@ fmclient_src_dir = {[dc]stx_fault_dir}/python-fmclient/fmclient
fm_api_src_dir = {[dc]stx_fault_dir}/fm-api/source
nfv_client_src_dir = ../../nfv/nfv/nfv-client
tsconfig_src_dir = {[dc]stx_config_dir}/tsconfig/tsconfig
software_src_dir = ../../update/software
[testenv]
basepython = python3.9
@@ -38,6 +39,7 @@ deps =
-e{[dc]fm_api_src_dir}
-e{[dc]nfv_client_src_dir}
-e{[dc]tsconfig_src_dir}
-e{[dc]software_src_dir}
allowlist_externals =
rm
find