Introduce dcagent API and periodic info gathering

This commit introduces the new dcagent package. It is comprised of a
periodic process that queries the necessary endpoints to gather the
audit data and an API running on port 8325 (internal) and 8326 (admin).
The API has a single endpoint, /v1/dcaudit, which accepts only PATCH.
For dcmanager-audit it responds with 'in-sync' or 'out-of-sync' based
on the RegionOne data provided; for dcorch-audit it returns the
subcloud data for the requested endpoints.

The agent also supports a key 'use_cache' to be sent in the payload
that will determine if it should use the cache data gathered by the
periodic process or get new information on the fly.

Example of payload using cached data:
  {
    "base_audit": "",
    "firmware_audit": "<regionone-audit-data>",
    "kubernetes_audit": "<regionone-audit-data>",
    "kube_rootca_audit" : "<regionone-audit-data>",
    "software_audit": "<regionone-audit-data>"
  }

Example of payload requesting new information:
  {
    "certificates": "",
    "iuser": "",
    "fernet_repo": "",
    "use_cache": "false"
  }

NOTES:
  - As patch and load audits will be deprecated in the next major
    release, no effort was made to integrate both patch and load audit
    to dcagent.
  - All tests described below were executed applying [1] as well,
    to avoid retesting.

[1]: https://review.opendev.org/c/starlingx/distcloud/+/923351

Test plan:
  - PASS: Run dcmanager audit with dcagent. Verify only one call is
          made to audit the subcloud and the response includes the
          correct sync status.
  - PASS: Run dcmanager audit without dcagent. Verify the audit
          works as expected querying each individual endpoint.

Story: 2011106
Task: 50559

Change-Id: I1820ca9688d5d05f8712f9a42f6012f2ec3e2d8a
Signed-off-by: Victor Romano <victor.gluzromano@windriver.com>
This commit is contained in:
Victor Romano 2024-07-02 00:34:21 -03:00
parent d4d548d7c6
commit 4ceac85048
39 changed files with 1829 additions and 4 deletions

View File

@ -2,13 +2,14 @@
# otherwise testr will pass --source distributedcloud when invoking coverage
# which breaks the source definitions in the .coveragerc file
[DEFAULT]
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1}
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1}
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60}
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1}
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1}
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60}
PYTHON=$(echo ${PYTHON:-python} | sed 's/--source distributedcloud//g')
${PYTHON} -m subunit.run discover -s dccommon $LISTOPT $IDOPTION
${PYTHON} -m subunit.run discover -s dcmanager $LISTOPT $IDOPTION
${PYTHON} -m subunit.run discover -s dcorch $LISTOPT $IDOPTION
${PYTHON} -m subunit.run discover -s dcagent $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
test_run_concurrency=echo 5

View File

@ -0,0 +1,10 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pbr.version
__version__ = pbr.version.VersionInfo("distributedcloud").version_string()

View File

View File

@ -0,0 +1,77 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Routines for configuring DC agent, largely copied from Neutron
"""
import sys
from oslo_config import cfg
from oslo_log import log as logging
from dcagent.common.i18n import _
from dcagent.common import version
LOG = logging.getLogger(__name__)
# Options registered under [DEFAULT] for the dcagent API service.
# bind_port 8325 matches the internal API port described in the commit.
common_opts = [
    cfg.StrOpt("bind_host", default="0.0.0.0", help=_("The host IP to bind to")),
    cfg.IntOpt("bind_port", default=8325, help=_("The port to bind to")),
    cfg.IntOpt("api_workers", default=1, help=_("number of api workers")),
    cfg.StrOpt(
        "auth_strategy", default="keystone", help=_("The type of authentication to use")
    ),
]
def init(args, **kwargs):
    """Register dcagent options and parse the configuration.

    :param args: command line arguments (excluding the program name)
    :param kwargs: extra keyword arguments forwarded to cfg.CONF()
    """
    # Register the configuration options
    cfg.CONF.register_opts(common_opts)
    logging.register_options(cfg.CONF)
    cfg.CONF(
        args=args,
        project="dcagent",
        version="%%(prog)s %s" % version.version_info.release_string(),
        **kwargs
    )
def setup_logging():
    """Sets up the logging options for a log with supplied name."""
    product_name = "dcagent"
    logging.setup(cfg.CONF, product_name)
    LOG.info("Logging enabled!")
    # Record program name and version at startup for supportability.
    LOG.info(
        "%(prog)s version %(version)s",
        {"prog": sys.argv[0], "version": version.version_info.release_string()},
    )
    LOG.debug("command line: %s", " ".join(sys.argv))
def reset_service():
    """Re-initialize logging after a SIGHUP while running as a daemon."""
    # Reset worker in case SIGHUP is called.
    # Note that this is called only in case a service is running in daemon mode.
    setup_logging()
    # TODO(vgluzrom) enforce policy later
    # policy.refresh()
def test_init():
    """Initialize configuration and logging for test runs.

    Safe to call repeatedly: re-registering logging options after the
    CLI has been parsed is tolerated.
    """
    # Register the configuration options
    cfg.CONF.register_opts(common_opts)
    try:
        logging.register_options(cfg.CONF)
    except cfg.ArgsAlreadyParsedError:
        # Options were already registered by a previous init; ignore.
        pass
    setup_logging()
def list_opts():
    """Yield (group, options) pairs for oslo.config option discovery."""
    # All dcagent API options live in the [DEFAULT] group (group None).
    yield from [(None, common_opts)]

View File

@ -0,0 +1,73 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
from keystonemiddleware import auth_token
from oslo_config import cfg
from oslo_middleware import request_id
from oslo_service import service
from dcagent.common import context as ctx
from dcagent.common.i18n import _
def setup_app(*args, **kwargs):
    """Create and configure the pecan WSGI application for the dcagent API."""
    opts = cfg.CONF.pecan
    config = {
        "server": {"port": cfg.CONF.bind_port, "host": cfg.CONF.bind_host},
        "app": {
            "root": "dcagent.api.controllers.root.RootController",
            "modules": ["dcagent.api"],
            "debug": opts.debug,
            "auth_enable": opts.auth_enable,
            "errors": {400: "/error", "__force_dict__": True},
        },
    }
    pecan_config = pecan.configuration.conf_from_dict(config)
    # NOTE(review): debug is hard-coded to False here even though
    # pecan_config.app.debug may be True — confirm intended.
    app = pecan.make_app(
        pecan_config.app.root,
        debug=False,
        wrap_app=_wrap_app,
        force_canonical=False,
        hooks=lambda: [ctx.AuthHook()],
        guess_content_type_from_ext=True,
    )
    return app
def _wrap_app(app):
    """Wrap the WSGI app with request-id and, when enabled, keystone auth."""
    app = request_id.RequestId(app)
    use_keystone = (
        cfg.CONF.pecan.auth_enable and cfg.CONF.auth_strategy == "keystone"
    )
    if not use_keystone:
        return app
    auth_conf = dict(cfg.CONF.keystone_authtoken)
    # Change auth decisions of requests to the app itself.
    auth_conf.update({"delay_auth_decision": True})
    # NOTE: Policy enforcement works only if Keystone
    # authentication is enabled. No support for other authentication
    # types at this point.
    return auth_token.AuthProtocol(app, auth_conf)
_launcher = None
def serve(api_service, conf, workers=1):
    """Launch the API service; may only be called once per process.

    :raises RuntimeError: if a launcher already exists.
    """
    global _launcher
    if _launcher:
        raise RuntimeError(_("serve() can only be called once"))
    _launcher = service.launch(conf, api_service, workers=workers)
def wait():
    """Block until the launcher created by serve() terminates."""
    _launcher.wait()

View File

@ -0,0 +1,102 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import abc
from pecan import expose
from pecan import request
import dcagent.common.context as k_context
def extract_context_from_environ():
    """Build a RequestContext from the current request's WSGI environ.

    Maps the standard keystonemiddleware headers (X-Auth-Token,
    X-User-Id, ...) onto RequestContext keyword arguments.
    """
    context_params = {
        "auth_token": "HTTP_X_AUTH_TOKEN",
        "user": "HTTP_X_USER_ID",
        "project": "HTTP_X_TENANT_ID",
        "user_name": "HTTP_X_USER_NAME",
        "tenant_name": "HTTP_X_PROJECT_NAME",
        "domain": "HTTP_X_DOMAIN_ID",
        "roles": "HTTP_X_ROLE",
        "user_domain": "HTTP_X_USER_DOMAIN_ID",
        "project_domain": "HTTP_X_PROJECT_DOMAIN_ID",
        "request_id": "openstack.request_id",
    }
    environ = request.environ
    for key, val in context_params.items():
        context_params[key] = environ.get(val)
    # The X-Role header is absent when the request did not pass through
    # keystonemiddleware; treat that as "no roles" instead of crashing
    # with AttributeError on None.split().
    role = environ.get("HTTP_X_ROLE")
    context_params["is_admin"] = "admin" in role.split(",") if role else False
    return k_context.RequestContext(**context_params)
def extract_credentials_for_policy():
    """Return the project name and role list needed for policy checks."""
    context_paras = {"project_name": "HTTP_X_PROJECT_NAME", "roles": "HTTP_X_ROLE"}
    environ = request.environ
    for key, val in context_paras.items():
        context_paras[key] = environ.get(val)
    # Guard against a missing X-Role header (None) instead of raising
    # AttributeError; an empty role list simply fails the policy check.
    roles = context_paras["roles"]
    context_paras["roles"] = roles.split(",") if roles else []
    return context_paras
def _get_pecan_data(obj):
return getattr(obj, "_pecan", {})
def _is_exposed(obj):
return getattr(obj, "exposed", False)
def _is_generic(obj):
    """True when *obj* was exposed with @expose(generic=True)."""
    # Inline the pecan-metadata lookup: membership in a dict equals
    # membership in its .keys() view.
    return "generic" in getattr(obj, "_pecan", {})
def _is_generic_handler(obj):
    """True when *obj* is a method-specific handler of a generic route."""
    # Inline the pecan-metadata lookup: membership in a dict equals
    # membership in its .keys() view.
    return "generic_handler" in getattr(obj, "_pecan", {})
class GenericPathController(object, metaclass=abc.ABCMeta):
    """A controller that allows path parameters to be equal to handler names.

    The _route method provides a custom route resolution that checks if the
    next object is marked as generic or a generic handler, pointing to the
    generic index method in case it is. Pecan will properly handle the rest
    of the routing process by redirecting it to the proper method function
    handler (GET, POST, PATCH, DELETE, etc.).

    Useful when part of the URL contains path parameters that might have
    the same name as an already defined exposed controller method.

    Requires the definition of an index method with the generator:
    @expose(generic=True, ...)

    Does not support nested subcontrollers.
    """

    # Attribute names that must never be resolved as routable handlers.
    RESERVED_NAMES = ("_route", "_default", "_lookup")

    @abc.abstractmethod
    def index(self):
        # Subclasses must implement and expose this with
        # @expose(generic=True, ...).
        pass

    @expose()
    def _route(self, remainder, request):
        # The first URL segment picks the candidate attribute; the rest
        # is handed back to pecan to continue routing.
        next_url_part, rest = remainder[0], remainder[1:]
        next_obj = getattr(self, next_url_part, None)
        is_generic = _is_generic(next_obj) or _is_generic_handler(next_obj)
        is_reserved_name = next_url_part in self.__class__.RESERVED_NAMES
        if _is_exposed(next_obj) and not is_generic and not is_reserved_name:
            # A non-generic exposed method with a non-reserved name
            return next_obj, rest
        else:
            # Treat the segment as a path parameter handled by index().
            return self.index, remainder

View File

@ -0,0 +1,43 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
from dcagent.api.controllers.v1 import root as v1_root
class RootController(object):
    """Top-level controller: version discovery plus routing into /v1."""

    @pecan.expose("json")
    def _lookup(self, version, *remainder):
        # Route /v<major>/... to the matching versioned controller.
        version = str(version)
        # NOTE(review): the major version is read from index 1 and the
        # minor from the last character — this assumes a version token
        # like "v1"; confirm behavior for multi-digit or dotted versions.
        minor_version = version[-1]
        major_version = version[1]
        # The minor version is appended so the v1 controller can strip it.
        remainder = remainder + (minor_version,)
        if major_version == "1":
            return v1_root.Controller(), remainder

    @pecan.expose(generic=True, template="json")
    def index(self):
        # Version discovery document served at the API root.
        return {
            "versions": [
                {
                    "status": "CURRENT",
                    "links": [
                        {"rel": "self", "href": pecan.request.application_url + "/v1/"}
                    ],
                    "id": "v1",
                    "updated": "2024-06-20",
                }
            ]
        }

    @index.when(method="POST")
    @index.when(method="PUT")
    @index.when(method="DELETE")
    @index.when(method="HEAD")
    @index.when(method="PATCH")
    def not_supported(self):
        # Only GET is supported on the root resource.
        pecan.abort(405)

View File

@ -0,0 +1,55 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import http.client
import json
from oslo_config import cfg
from oslo_log import log as logging
import pecan
from pecan import expose
from pecan import request
from dcagent.common.audit_manager import RequestedAudit
from dcagent.common.exceptions import UnsupportedAudit
from dcagent.common.i18n import _
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class AuditController(object):
    """Pecan controller for the /v1/dcaudit endpoint (PATCH only)."""

    @expose(generic=True, template="json")
    def index(self):
        # Route the request to specific methods with parameters
        pass

    @index.when(method="PATCH", template="json")
    def patch(self):
        """Return the audit information.

        The JSON body maps audit types to their RegionOne audit data.
        The optional "use_cache" key selects between the periodically
        cached data and freshly fetched data.
        """
        # Convert JSON string in request to Python dict
        try:
            payload = json.loads(request.body)
        except ValueError:
            pecan.abort(http.client.BAD_REQUEST, _("Request body decoding error"))
        if not payload:
            pecan.abort(http.client.BAD_REQUEST, _("Body required"))
        try:
            # Delete "use_cache" from payload so it doesn't get passed as an audit
            use_cache = payload.pop("use_cache", True)
            # Clients may send "use_cache" as a JSON string (the commit
            # message example uses "false"); any non-empty string is
            # truthy, so coerce string values explicitly.
            if isinstance(use_cache, str):
                use_cache = use_cache.strip().lower() not in ("false", "no", "0")
            requested_audit = RequestedAudit(use_cache=use_cache)
            return requested_audit.get_sync_status(payload)
        except UnsupportedAudit as ex:
            LOG.exception(ex)
            pecan.abort(http.client.BAD_REQUEST, ex.msg)
        except Exception as ex:
            LOG.exception(ex)
            msg = f"Unable to get audit info: {ex}"
            pecan.abort(http.client.INTERNAL_SERVER_ERROR, _(msg))

View File

@ -0,0 +1,42 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pecan
from dcagent.api.controllers.v1 import audit
class Controller(object):
    """Version 1 API root; dispatches to the resource controllers."""

    def _get_resource_controller(self, remainder):
        # Resolve the first remaining URL segment to a controller class.
        if not remainder:
            pecan.abort(404)
            return
        # Drop the minor-version element appended by RootController._lookup.
        remainder = remainder[:-1]
        res_controllers = dict()
        res_controllers["dcaudit"] = audit.AuditController
        # Expose controllers as attributes so pecan can introspect them.
        for name, ctrl in res_controllers.items():
            setattr(self, name, ctrl)
        try:
            resource = remainder[0]
        except IndexError:
            pecan.abort(404)
            return
        if resource not in res_controllers:
            pecan.abort(404)
            return
        remainder = remainder[1:]
        return res_controllers[resource](), remainder

    @pecan.expose()
    def _lookup(self, *remainder):
        return self._get_resource_controller(remainder)

View File

@ -0,0 +1,13 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import itertools
from dcagent.api.policies import base
def list_rules():
    """Chain together the rules of every dcagent API policy module."""
    return itertools.chain(base.list_rules())

View File

@ -0,0 +1,28 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_policy import policy
ADMIN_IN_SYSTEM_PROJECTS = "admin_in_system_projects"
READER_IN_SYSTEM_PROJECTS = "reader_in_system_projects"
# Base policy rules: admin/reader access restricted to the admin and
# services projects.
base_rules = [
    policy.RuleDefault(
        name=ADMIN_IN_SYSTEM_PROJECTS,
        check_str="role:admin and (project_name:admin or " + "project_name:services)",
        description="Base rule.",
    ),
    policy.RuleDefault(
        name=READER_IN_SYSTEM_PROJECTS,
        check_str="role:reader and (project_name:admin or " + "project_name:services)",
        description="Base rule.",
    ),
]


def list_rules():
    """Return the base rules shared by all dcagent policies."""
    return base_rules

View File

@ -0,0 +1,52 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""Policy engine For DC."""
from oslo_config import cfg
from oslo_policy import policy
from webob import exc
from dcmanager.api import policies as controller_policies
CONF = cfg.CONF
_ENFORCER = None
def reset():
    """Discard current Enforcer object."""
    global _ENFORCER
    _ENFORCER = None
def init(policy_file="policy.yaml"):
    """Init an Enforcer class.

    :param policy_file: Custom policy file to be used.
    :return: Returns a Enforcer instance.
    """
    global _ENFORCER
    if not _ENFORCER:
        # https://docs.openstack.org/oslo.policy/latest/user/usage.html
        _ENFORCER = policy.Enforcer(
            CONF,
            policy_file=policy_file,
            default_rule="default",
            use_conf=True,
            overwrite=True,
        )
        # NOTE(review): defaults are registered from dcmanager.api
        # policies (see the module import), not dcagent's own policy
        # package — confirm this is intended.
        _ENFORCER.register_defaults(controller_policies.list_rules())
    return _ENFORCER
def authorize(rule, target, creds, do_raise=True):
    """A wrapper around 'authorize' from 'oslo_policy.policy'.

    :raises webob.exc.HTTPForbidden: when do_raise is True and the
        policy check fails.
    """
    init()
    return _ENFORCER.authorize(
        rule, target, creds, do_raise=do_raise, exc=exc.HTTPForbidden
    )

View File

View File

@ -0,0 +1,62 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
DC Agent Periodic Audit Service.
"""
import sys
import eventlet
eventlet.monkey_patch()
# pylint: disable=wrong-import-position
from oslo_config import cfg # noqa: E402
from oslo_i18n import _lazy # noqa: E402
from oslo_log import log as logging # noqa: E402
from oslo_service import service as oslo_service # noqa: E402
from oslo_service import systemd # noqa: E402
from oslo_service import wsgi # noqa: E402
from dcagent.api import api_config # noqa: E402
from dcagent.api import app # noqa: E402
from dcagent.common.audit_manager import PeriodicAudit # noqa: E402
from dcagent.common import config # noqa: E402
# pylint: enable=wrong-import-position
_lazy.enable_lazy()
config.register_options()
LOG = logging.getLogger("dcagent")
CONF = cfg.CONF
WORKERS = 1
def main():
    """Entry point: start the dcagent API server and the periodic audit."""
    api_config.init(sys.argv[1:])
    api_config.setup_logging()
    application = app.setup_app()
    host = CONF.bind_host
    port = CONF.bind_port
    LOG.info(f"Server on http://{host}:{port} with {WORKERS} worker")
    systemd.notify_once()
    service = wsgi.Server(CONF, "DCAgent", application, host, port)
    app.serve(service, CONF, WORKERS)
    # NOTE(review): PeriodicAudit.__init__ calls periodic_audit_loop(),
    # which only returns on GreenletExit — the launch/wait lines below
    # appear unreachable until then; confirm intended.
    srv = PeriodicAudit()
    launcher = oslo_service.launch(cfg.CONF, srv, workers=WORKERS)
    LOG.info("Starting Dcagent...")
    cfg.CONF.log_opt_values(LOG, logging.DEBUG)
    launcher.wait()
if __name__ == "__main__":
main()

View File

@ -0,0 +1,123 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import eventlet
from eventlet.greenpool import GreenPool
from oslo_config import cfg
from oslo_log import log as logging
from dcagent.common.exceptions import UnsupportedAudit
from dcagent.common.utils import BaseAuditManager
from dccommon import consts as dccommon_consts
from dcmanager.audit.base_audit import get_subcloud_base_audit
from dcmanager.audit.firmware_audit import FirmwareAudit
from dcmanager.audit.kube_rootca_update_audit import KubeRootcaUpdateAudit
from dcmanager.audit.kubernetes_audit import KubernetesAudit
from dcmanager.audit.software_audit import SoftwareAudit
from dcorch.common import consts as dcorch_consts
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
SYSINV_REQUEST_MAP = {
dcorch_consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: "get_certificates",
dcorch_consts.RESOURCE_TYPE_SYSINV_USER: "get_user",
dcorch_consts.RESOURCE_TYPE_SYSINV_FERNET_REPO: "get_fernet_keys",
}
class PeriodicAudit(BaseAuditManager):
    """Refresh the audit caches on a fixed interval.

    Note: instantiating this class enters periodic_audit_loop() from
    __init__, which only returns on GreenletExit, so the constructor
    blocks for the lifetime of the loop.
    """

    def __init__(self):
        super().__init__()
        self.periodic_audit_loop()

    def periodic_audit_loop(self):
        while True:
            try:
                # use_cache=False forces fresh data so the audit calls
                # below repopulate the cached responses.
                self.initialize_clients(use_cache=False)
                self._run_periodic_audit_loop()
                eventlet.greenthread.sleep(CONF.scheduler.dcagent_audit_interval)
            except eventlet.greenlet.GreenletExit:
                # We have been told to exit
                return
            except Exception:
                LOG.exception("Error in periodic audit loop")

    def _run_periodic_audit_loop(self):
        # NOTE: We don't care about the return value of the audit functions
        # as the execution here is only used as a way to refresh the cache
        get_subcloud_base_audit(
            sysinv_client=self.sysinv_client, fm_client=self.fm_client
        )
        FirmwareAudit.get_subcloud_audit_data(self.sysinv_client)
        KubernetesAudit.get_subcloud_audit_data(self.sysinv_client)
        KubeRootcaUpdateAudit.get_subcloud_audit_data(
            self.sysinv_client, self.fm_client
        )
        SoftwareAudit.get_subcloud_audit_data(self.software_client)
class RequestedAudit(BaseAuditManager):
    """Serve an on-demand audit request coming from the dcagent API.

    Each audit type in the payload is processed in parallel and the
    responses are returned keyed by audit type.
    """

    def __init__(self, use_cache: bool = True):
        super().__init__()
        # Whether to serve from the periodically refreshed cache or
        # fetch fresh data from the clients.
        self.use_cache = use_cache

    def get_single_audit_status(self, audit_type, regionone_audit_data):
        """Run one audit and return (audit_type, response).

        :raises UnsupportedAudit: when audit_type is neither a known
            audit nor a supported sysinv resource request.
        """
        # Since this run in parallel, we need to initialize the clients
        # here to not use the same socket in every call
        sysinv_client, fm_client, software_client = self.initialize_clients(
            use_cache=self.use_cache
        )
        if audit_type == dccommon_consts.BASE_AUDIT:
            (availability, inactive_sg, alarms) = get_subcloud_base_audit(
                sysinv_client=sysinv_client, fm_client=fm_client
            )
            resp = {
                "availability": availability,
                "inactive_sg": inactive_sg,
                "alarms": alarms,
            }
        elif audit_type == dccommon_consts.FIRMWARE_AUDIT:
            resp = FirmwareAudit.get_subcloud_sync_status(
                sysinv_client, regionone_audit_data
            )
        elif audit_type == dccommon_consts.KUBE_ROOTCA_AUDIT:
            resp = KubeRootcaUpdateAudit.get_subcloud_sync_status(
                sysinv_client, fm_client, regionone_audit_data
            )
        elif audit_type == dccommon_consts.KUBERNETES_AUDIT:
            resp = KubernetesAudit.get_subcloud_sync_status(
                sysinv_client, regionone_audit_data
            )
        elif audit_type == dccommon_consts.SOFTWARE_AUDIT:
            resp = SoftwareAudit.get_subcloud_sync_status(
                software_client, regionone_audit_data
            )
        elif audit_type in SYSINV_REQUEST_MAP:
            # dcorch-style resource request handled directly by sysinv.
            resp = getattr(sysinv_client, SYSINV_REQUEST_MAP[audit_type])()
        else:
            raise UnsupportedAudit(audit=audit_type)
        # If the response is an object or a list of objects, convert it
        # to a dictionary before returning. hasattr() is the idiomatic
        # (and cheaper) form of the previous '"to_dict" in dir(...)'.
        if hasattr(resp, "to_dict"):
            resp = resp.to_dict()
        elif isinstance(resp, list):
            resp = [r.to_dict() for r in resp if hasattr(r, "to_dict")]
        return audit_type, resp

    def get_sync_status(self, payload):
        """Run all requested audits concurrently and collect the results."""
        sync_resp = {}
        pool = GreenPool(size=10)
        jobs = [
            pool.spawn(self.get_single_audit_status, audit_type, regionone_audit_data)
            for audit_type, regionone_audit_data in payload.items()
        ]
        for job in jobs:
            audit_type, resp = job.wait()
            sync_resp[audit_type] = resp
        return sync_resp

View File

@ -0,0 +1,150 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
File to store all the configurations
"""
from oslo_config import cfg
from oslo_utils import importutils
# Ensure keystonemiddleware options are imported
importutils.import_module("keystonemiddleware.auth_token")
# OpenStack credentials used for Endpoint Cache
# We need to register the below non-standard config options to dcagent engine
keystone_opts = [
cfg.StrOpt("username", help="Username of account"),
cfg.StrOpt("password", help="Password of account"),
cfg.StrOpt("project_name", help="Tenant name of account"),
cfg.StrOpt(
"user_domain_name", default="Default", help="User domain name of account"
),
cfg.StrOpt(
"project_domain_name", default="Default", help="Project domain name of account"
),
]
# Pecan_opts
pecan_opts = [
cfg.StrOpt(
"root",
default="dcagent.api.controllers.root.RootController",
help="Pecan root controller",
),
cfg.ListOpt(
"modules",
default=["dcagent.api"],
help="A list of modules where pecan will search for applications.",
),
cfg.BoolOpt(
"debug",
default=False,
help="Enables the ability to display tracebacks in the browser and "
"interactively debug during development.",
),
cfg.BoolOpt(
"auth_enable", default=True, help="Enables user authentication in pecan."
),
]
# OpenStack credentials used for Endpoint Cache
cache_opts = [
cfg.StrOpt("auth_uri", help="Keystone authorization url"),
cfg.StrOpt("identity_uri", help="Keystone service url"),
cfg.StrOpt(
"admin_username",
help="Username of admin account, needed when "
"auto_refresh_endpoint set to True",
),
cfg.StrOpt(
"admin_password",
help="Password of admin account, needed when "
"auto_refresh_endpoint set to True",
),
cfg.StrOpt(
"admin_tenant",
help="Tenant name of admin account, needed when "
"auto_refresh_endpoint set to True",
),
cfg.StrOpt(
"admin_user_domain_name",
default="Default",
help="User domain name of admin account, needed when "
"auto_refresh_endpoint set to True",
),
cfg.StrOpt(
"admin_project_domain_name",
default="Default",
help="Project domain name of admin account, needed when "
"auto_refresh_endpoint set to True",
),
]
# OpenStack credentials used for Endpoint Cache
endpoint_cache_opts = [
cfg.StrOpt("auth_uri", help="Keystone authorization url"),
cfg.StrOpt("auth_plugin", help="Name of the plugin to load"),
cfg.StrOpt("username", help="Username of account"),
cfg.StrOpt("password", secret=True, help="Password of account"),
cfg.StrOpt("project_name", help="Project name of account"),
cfg.StrOpt(
"user_domain_name", default="Default", help="User domain name of account"
),
cfg.StrOpt(
"project_domain_name", default="Default", help="Project domain name of account"
),
cfg.IntOpt(
"http_connect_timeout",
help="Request timeout value for communicating with Identity" " API server.",
),
]
scheduler_opts = [
cfg.BoolOpt(
"periodic_enable",
default=True,
help="Boolean value to enable or disable periodic tasks",
),
cfg.IntOpt(
"dcagent_audit_interval",
default=30,
help="Periodic time interval for subcloud audit",
),
]
common_opts = [
cfg.IntOpt("workers", default=1, help="Number of workers"),
cfg.StrOpt("host", default="localhost", help="Hostname of the machine"),
]
scheduler_opt_group = cfg.OptGroup(
name="scheduler", title="Scheduler options for periodic job"
)
keystone_opt_group = cfg.OptGroup(name="keystone_authtoken", title="Keystone options")
# The group stores the pecan configurations.
pecan_group = cfg.OptGroup(name="pecan", title="Pecan options")
cache_opt_group = cfg.OptGroup(name="cache", title="OpenStack Credentials")
endpoint_cache_opt_group = cfg.OptGroup(
name="endpoint_cache", title="OpenStack Credentials"
)
def list_opts():
    """Yield (group name, options) pairs for every dcagent option group."""
    yield cache_opt_group.name, cache_opts
    yield endpoint_cache_opt_group.name, endpoint_cache_opts
    yield scheduler_opt_group.name, scheduler_opts
    yield pecan_group.name, pecan_opts
    # Ungrouped ([DEFAULT]) options.
    yield None, common_opts
def register_options():
    """Register every dcagent option group with the global config object."""
    for group, opts in list_opts():
        cfg.CONF.register_opts(opts, group=group)

View File

@ -0,0 +1,148 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from oslo_context import context as base_context
from oslo_utils import encodeutils
import pecan
from pecan import hooks
from dcagent.api.policies import base as base_policy
from dcagent.api import policy
ALLOWED_WITHOUT_AUTH = "/"
class RequestContext(base_context.RequestContext):
    """Stores information about the security context.

    The context encapsulates information related to the user accessing the
    the system, as well as additional request information.
    """

    def __init__(
        self,
        auth_token=None,
        user=None,
        project=None,
        domain=None,
        user_domain=None,
        project_domain=None,
        is_admin=None,
        read_only=False,
        show_deleted=False,
        request_id=None,
        auth_url=None,
        trusts=None,
        user_name=None,
        project_name=None,
        domain_name=None,
        user_domain_name=None,
        project_domain_name=None,
        auth_token_info=None,
        region_name=None,
        roles=None,
        password=None,
        **kwargs,
    ):
        # Initializer of request context.
        # We still have 'tenant' param because oslo_context still use it.
        # pylint: disable=E1123
        super(RequestContext, self).__init__(
            auth_token=auth_token,
            user=user,
            tenant=project,
            domain=domain,
            user_domain=user_domain,
            project_domain=project_domain,
            roles=roles,
            read_only=read_only,
            show_deleted=show_deleted,
            request_id=request_id,
        )
        # request_id might be a byte array
        self.request_id = encodeutils.safe_decode(self.request_id)
        # we save an additional 'project' internally for use
        self.project = project
        self.auth_url = auth_url
        self.trusts = trusts
        self.user_name = user_name
        self.project_name = project_name
        self.domain_name = domain_name
        self.user_domain_name = user_domain_name
        self.project_domain_name = project_domain_name
        self.auth_token_info = auth_token_info
        self.region_name = region_name
        self.roles = roles or []
        self.password = password
        # Check user is admin or not
        if is_admin is None:
            # Derive admin status from the policy engine when not supplied.
            self.is_admin = policy.authorize(
                base_policy.ADMIN_IN_SYSTEM_PROJECTS, {}, self.to_dict(), do_raise=False
            )
        else:
            self.is_admin = is_admin

    def to_dict(self):
        """Return the context as a plain dict (used for policy checks)."""
        return {
            "auth_url": self.auth_url,
            "auth_token": self.auth_token,
            "auth_token_info": self.auth_token_info,
            "user": self.user,
            "user_name": self.user_name,
            "user_domain": self.user_domain,
            "user_domain_name": self.user_domain_name,
            "project": self.project,
            "project_name": self.project_name,
            "project_domain": self.project_domain,
            "project_domain_name": self.project_domain_name,
            "domain": self.domain,
            "domain_name": self.domain_name,
            "trusts": self.trusts,
            "region_name": self.region_name,
            "roles": self.roles,
            "show_deleted": self.show_deleted,
            "is_admin": self.is_admin,
            "request_id": self.request_id,
            "password": self.password,
        }

    @classmethod
    def from_dict(cls, values):
        """Rebuild a RequestContext from a dict produced by to_dict()."""
        return cls(**values)
def get_admin_context(show_deleted=False):
    """Return a RequestContext with admin privileges."""
    return RequestContext(is_admin=True, show_deleted=show_deleted)
def get_service_context(**args):
    """An abstraction layer for getting service context.

    Currently a no-op placeholder.
    """
    pass
class AuthHook(hooks.PecanHook):
    """Reject requests whose identity keystonemiddleware did not confirm."""

    def before(self, state):
        # The root path (version discovery) is reachable without a token.
        if state.request.path == ALLOWED_WITHOUT_AUTH:
            return
        req = state.request
        identity_status = req.headers.get("X-Identity-Status")
        service_identity_status = req.headers.get("X-Service-Identity-Status")
        # Either a confirmed user identity or a confirmed service
        # identity is sufficient.
        if identity_status == "Confirmed" or service_identity_status == "Confirmed":
            return
        if req.headers.get("X-Auth-Token"):
            msg = f"Auth token is invalid: {req.headers['X-Auth-Token']}"
        else:
            msg = "Authentication required"
        msg = f"Failed to validate access token: {msg}"
        pecan.abort(status_code=401, detail=msg)

View File

@ -0,0 +1,42 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Dcagent base exception handling.
"""
from oslo_utils import excutils
from dcdbsync.common.i18n import _
class DcagentException(Exception):
    """Base dcagent Exception.

    To correctly use this class, inherit from it and define
    a 'message' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """

    # NOTE(review): the module imports _ from dcdbsync.common.i18n rather
    # than dcagent.common.i18n — confirm the intended translation domain.
    message = _("An unknown exception occurred.")

    def __init__(self, **kwargs):
        try:
            super(DcagentException, self).__init__(self.message % kwargs)
            self.msg = self.message % kwargs
        except Exception:
            # Formatting failed (e.g. missing/extra kwargs); re-raise
            # unless the subclass opts out via use_fatal_exceptions().
            with excutils.save_and_reraise_exception() as ctxt:
                if not self.use_fatal_exceptions():
                    ctxt.reraise = False
                    # at least get the core message out if something happened
                    super(DcagentException, self).__init__(self.message)

    def use_fatal_exceptions(self):
        # Subclasses may override to make formatting errors fatal.
        return False
class UnsupportedAudit(DcagentException):
    """Raised when the requested audit type is not recognized."""

    message = _("Requested audit %(audit)s is not supported.")

View File

@ -0,0 +1,12 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import oslo_i18n
_translators = oslo_i18n.TranslatorFactory(domain="dcagent")
# The primary translation function using the well-known name "_"
_ = _translators.primary

View File

@ -0,0 +1,235 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import threading
from keystoneauth1.identity import v3
from keystoneauth1 import session
from keystoneclient.v3 import client as ks_client
from oslo_config import cfg
from oslo_log import log as logging
from tsconfig import tsconfig as tsc
from dccommon import consts as dccommon_consts
from dccommon.drivers.openstack.fm import FmClient
from dccommon.drivers.openstack.software_v1 import SoftwareClient
from dccommon.drivers.openstack.sysinv_v1 import SysinvClient
from dccommon.utils import is_token_expiring_soon
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# TODO(vgluzrom): Implement lru_cache from functools to handle described case
def cache_wrapper(cls):
    """Decorator to cache the results of the methods in the class.

    Note: This decorator only caches the results based on the function name.
    It cannot handle the case where the same function is called with different
    arguments and the result is different.
    """

    def wrap_method(method):
        def wrapper(self, *args, **kwargs):
            # Return the cached result if available. The 'use_cache'
            # flag is read from the instance; when absent, caching on
            # reads is disabled.
            use_cache = getattr(self, "use_cache", False)
            if use_cache and method.__name__ in self.__class__._results:
                LOG.debug(
                    f"Returning cached response for {method.__name__} "
                    f"from {self.__class__.__name__}"
                )
                return self.__class__._results[method.__name__]
            result = method(self, *args, **kwargs)
            # Cache the results in the '_result' class variable
            LOG.debug(
                f"Saving new response for {method.__name__} "
                f"in {self.__class__.__name__}"
            )
            # Writes are serialized via the class lock; reads above are
            # intentionally lock-free.
            with self.__class__._lock:
                self.__class__._results[method.__name__] = result
            return result

        return wrapper

    # Apply the wrapper to all non private functions in the class
    for attr_name in dir(cls):
        if not attr_name.startswith("_"):
            attr = getattr(cls, attr_name)
            if callable(attr):
                setattr(cls, attr_name, wrap_method(attr))
    return cls
@cache_wrapper
class CachedSysinvClient(SysinvClient):
    # Per-class response cache shared by all instances; writes guarded
    # by _lock in cache_wrapper.
    _results = {}
    _lock = threading.Lock()
@cache_wrapper
class CachedFmClient(FmClient):
    # Per-class response cache shared by all instances; writes guarded
    # by _lock in cache_wrapper.
    _results = {}
    _lock = threading.Lock()
@cache_wrapper
class CachedSoftwareClient(SoftwareClient):
    # Per-class response cache shared by all instances; writes guarded
    # by _lock in cache_wrapper.
    _results = {}
    _lock = threading.Lock()
class KeystoneCache(object):
    """Simple cache to store the subcloud keystone token and client/session."""

    # Class-level cache shared by all instances.
    subcloud_keystone_client: ks_client = None
    subcloud_token = {}

    def __init__(self):
        if not KeystoneCache.subcloud_keystone_client:
            self.initialize_keystone_client()
        # Re-create the client/token when the cached token is about to expire.
        if is_token_expiring_soon(KeystoneCache.subcloud_token):
            self.clear_subcloud_keystone_data()
            self.initialize_keystone_client()

    @staticmethod
    def get_admin_session(
        auth_url: str,
        user_name: str,
        user_domain_name: str,
        user_password: str,
        user_project: str,
        user_project_domain: str,
        timeout: float = None,
    ) -> session.Session:
        """Get the admin session.

        :param auth_url: The authentication URL.
        :type auth_url: str
        :param user_name: The user name.
        :type user_name: str
        :param user_domain_name: The user domain name.
        :type user_domain_name: str
        :param user_password: The user password.
        :type user_password: str
        :param user_project: The user project.
        :type user_project: str
        :param user_project_domain: The user project domain.
        :type user_project_domain: str
        :param timeout: The timeout.
        :type timeout: int
        :return: The admin session.
        :rtype: session.Session
        """
        user_auth = v3.Password(
            auth_url=auth_url,
            username=user_name,
            user_domain_name=user_domain_name,
            password=user_password,
            project_name=user_project,
            project_domain_name=user_project_domain,
            include_catalog=True,
        )
        # Fall back to the configured endpoint_cache timeout when none given.
        timeout = (
            CONF.endpoint_cache.http_connect_timeout if timeout is None else timeout
        )
        return session.Session(
            auth=user_auth,
            additional_headers=dccommon_consts.USER_HEADER,
            timeout=timeout,
        )

    @staticmethod
    def get_keystone_client(keystone_session: session.Session) -> ks_client:
        """Get the keystone client.

        :param keystone_session: subcloud keystone session
        :type keystone_session: session.Session
        :return: subcloud keystone client
        :rtype: ks_client
        """
        return ks_client.Client(session=keystone_session)

    @staticmethod
    def get_subcloud_token(subcloud_keystone_client: ks_client = None):
        """Get the subcloud token.

        :param subcloud_keystone_client: The subcloud keystone client.
        :type subcloud_keystone_client: ks_client
        :return: The subcloud token.
        """
        # Use the cached class-level client when none is supplied.
        subcloud_keystone_client = (
            subcloud_keystone_client
            if subcloud_keystone_client
            else KeystoneCache.subcloud_keystone_client
        )
        return subcloud_keystone_client.tokens.validate(
            subcloud_keystone_client.session.get_token(),
            include_catalog=False,
        )

    @classmethod
    def initialize_keystone_client(cls):
        """Initialize the keystone client and token for the subcloud."""
        subcloud_keystone_client = cls.get_keystone_client(
            cls.get_admin_session(
                CONF.endpoint_cache.auth_uri,
                CONF.endpoint_cache.username,
                CONF.endpoint_cache.user_domain_name,
                CONF.endpoint_cache.password,
                CONF.endpoint_cache.project_name,
                CONF.endpoint_cache.project_domain_name,
                timeout=CONF.endpoint_cache.http_connect_timeout,
            )
        )
        subcloud_token = cls.get_subcloud_token(subcloud_keystone_client)
        cls.set_subcloud_keystone_data(subcloud_keystone_client, subcloud_token)

    @staticmethod
    def set_subcloud_keystone_data(keystone_client: ks_client, keystone_token: dict):
        # Store the client/token in the class-level cache.
        KeystoneCache.subcloud_keystone_client = keystone_client
        KeystoneCache.subcloud_token = keystone_token

    @staticmethod
    def clear_subcloud_keystone_data():
        # Drop the cached client/token so the next access re-creates them.
        KeystoneCache.subcloud_keystone_client = None
        KeystoneCache.subcloud_token = {}
class BaseAuditManager(object):
    """Holds the RegionOne clients shared by the dcagent audit code."""

    def __init__(self):
        self.keystone_client = None
        self.sysinv_client = None
        self.fm_client = None
        self.software_client = None

    def initialize_clients(self, use_cache: bool = True):
        """Build the cached RegionOne clients and set their cache policy.

        :param use_cache: propagated to each client's ``use_cache``
            attribute; presumably consumed by cache_wrapper to decide
            between cached and fresh results — confirm against the wrapper.
        :return: tuple of (sysinv, fm, software) clients
        """
        region_one = tsc.region_1_name
        self.keystone_client = KeystoneCache().subcloud_keystone_client
        admin_session = self.keystone_client.session
        self.sysinv_client = CachedSysinvClient(
            region_one,
            admin_session,
            endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL,
        )
        self.fm_client = CachedFmClient(
            region_one,
            admin_session,
            endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL,
        )
        self.software_client = CachedSoftwareClient(
            region_one,
            admin_session,
            endpoint_type=dccommon_consts.KS_ENDPOINT_INTERNAL,
        )
        # Apply the same cache policy to every client.
        for client in (self.sysinv_client, self.fm_client, self.software_client):
            client.use_cache = use_cache
        return self.sysinv_client, self.fm_client, self.software_client

View File

@ -0,0 +1,33 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import pbr.version
# Identification strings returned by the module's accessor functions below.
DCAGENT_VENDOR = "Wind River Systems"
DCAGENT_PRODUCT = "Distributed Cloud DC Agent"
DCAGENT_PACKAGE = None  # OS distro package version suffix

# Version data is derived from the distributedcloud pbr package metadata.
version_info = pbr.version.VersionInfo("distributedcloud")
# NOTE: bound method, callers invoke version_string() to get the string.
version_string = version_info.version_string
def vendor_string():
    """Return the dcagent vendor identification string."""
    return DCAGENT_VENDOR
def product_string():
    """Return the dcagent product identification string."""
    return DCAGENT_PRODUCT
def package_string():
    """Return the OS distro package version suffix, or None when unset."""
    return DCAGENT_PACKAGE
def version_string_with_package():
    """Return the version string, suffixed with the package version if set."""
    base_version = version_info.version_string()
    package_suffix = package_string()
    if package_suffix is None:
        return base_version
    return f"{base_version}-{package_suffix}"

View File

@ -0,0 +1,161 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import http.client
import json
import mock
from dcagent.common import utils
from dcagent.tests.api.test_root_controller import DCAgentApiTest
from dccommon import consts as dccommon_consts
from dcmanager.audit.alarm_aggregation import AlarmAggregation
from dcmanager.audit import base_audit
from dcmanager.audit.firmware_audit import FirmwareAudit
from dcmanager.audit.kube_rootca_update_audit import KubeRootcaUpdateAudit
from dcmanager.audit.kubernetes_audit import KubernetesAudit
from dcmanager.audit.software_audit import SoftwareAudit
class BaseTestAudit(DCAgentApiTest):
    """Common setup for /v1/dcaudit tests.

    Points the request helpers at the dcaudit endpoint using a JSON PATCH
    and mocks every external client used by dcagent.common.utils.
    """

    def setUp(self):
        super().setUp()
        self.url = "/v1/dcaudit"
        self.method = self.app.patch_json
        # Replace all clients on the utils module with mocks so the tests
        # never reach keystone, fm, software or sysinv.
        for mocker in (
            self._mock_keystone_cache,
            self._mock_fm_client,
            self._mock_software_client,
            self._mock_sysinv_client,
        ):
            mocker(utils)
class TestAuditController(BaseTestAudit):
    """Test class for Audit Controller"""

    def test_unmapped_method(self):
        """Test requesting an unmapped method results in success with null content"""
        # PUT is not mapped by the controller, so the generic handler
        # answers 200 with a JSON "null" body.
        self.method = self.app.put
        response = self._send_request()
        self._assert_response(response)
        self.assertEqual("null", response.text)
class TestAuditPatch(BaseTestAudit):
    """Error-path tests for PATCH /v1/dcaudit."""

    def test_patch_audit_bad_request_on_decoding_error(self):
        """A body that cannot be decoded must produce a 400 response."""
        self.params = "{bad json"
        with mock.patch.object(json, "loads", side_effect=ValueError()):
            response = self._send_request()
        self._assert_pecan_and_response(
            response, http.client.BAD_REQUEST, "Request body decoding error"
        )

    def test_patch_audit_bad_request_on_empty_body(self):
        """An empty body must produce a 400 response."""
        self.params = {}
        response = self._send_request()
        self._assert_pecan_and_response(
            response, http.client.BAD_REQUEST, "Body required"
        )

    def test_patch_audit_internal_server_error_on_exception(self):
        """An unexpected audit error must surface as a 500 response."""
        exception_msg = "Test Error"
        self.params = {"use_cache": True}
        with mock.patch(
            "dcagent.common.audit_manager.RequestedAudit.get_sync_status",
            side_effect=Exception(exception_msg),
        ):
            response = self._send_request()
        self._assert_pecan_and_response(
            response,
            http.client.INTERNAL_SERVER_ERROR,
            f"Unable to get audit info: {exception_msg}",
        )
class TestRequestedAudit(BaseTestAudit):
    """Tests for the sync status returned by a requested audit."""

    def setUp(self):
        super().setUp()
        # (attribute name, patch target, patched callable) for every
        # external dependency the audit path calls into.
        patch_specs = (
            ("mock_availability", base_audit, "get_subcloud_availability_status"),
            ("mock_alarm_aggregation", AlarmAggregation, "get_alarm_summary"),
            ("mock_software_audit", SoftwareAudit, "get_subcloud_sync_status"),
            ("mock_firmware_audit", FirmwareAudit, "get_subcloud_sync_status"),
            ("mock_kubernetes_audit", KubernetesAudit, "get_subcloud_sync_status"),
            ("mock_kube_rootca_audit", KubeRootcaUpdateAudit, "get_subcloud_sync_status"),
        )
        for attr_name, target, func_name in patch_specs:
            patcher = mock.patch.object(target, func_name)
            setattr(self, attr_name, patcher.start())
            self.addCleanup(patcher.stop)

    def test_get_sync_status(self):
        """A full audit request must aggregate every audit response."""
        # Stub the responses of the external dependencies.
        self.mock_availability.return_value = ["online", []]
        self.mock_alarm_aggregation.return_value = "test_alarm_summary"
        self.mock_software_audit.return_value = "software_audit_response"
        self.mock_firmware_audit.return_value = "firmware_audit_response"
        self.mock_kubernetes_audit.return_value = "kubernetes_audit_response"
        self.mock_kube_rootca_audit.return_value = "kube_rootca_audit_response"
        self.params = {
            dccommon_consts.BASE_AUDIT: "",
            dccommon_consts.FIRMWARE_AUDIT: "regionone_data_firmware",
            dccommon_consts.KUBE_ROOTCA_AUDIT: "regionone_data_kube_rootca",
            dccommon_consts.KUBERNETES_AUDIT: "regionone_data_kubernetes",
            dccommon_consts.SOFTWARE_AUDIT: "regionone_data_software",
        }
        response = self._send_request()
        # NOTE: key order matters because the expected text is serialized
        # with sort_keys=False.
        expected_result = {
            dccommon_consts.BASE_AUDIT: {
                "availability": "online",
                "inactive_sg": [],
                "alarms": "test_alarm_summary",
            },
            dccommon_consts.FIRMWARE_AUDIT: "firmware_audit_response",
            dccommon_consts.KUBE_ROOTCA_AUDIT: "kube_rootca_audit_response",
            dccommon_consts.KUBERNETES_AUDIT: "kubernetes_audit_response",
            dccommon_consts.SOFTWARE_AUDIT: "software_audit_response",
        }
        self._assert_response(
            response,
            expected_response_text=json.dumps(expected_result, sort_keys=False),
        )

    def test_get_sync_status_unsuported_audit(self):
        """An unknown audit key must produce a 400 response."""
        self.params = {"fake_audit": ""}
        response = self._send_request()
        self._assert_pecan_and_response(
            response,
            http.client.BAD_REQUEST,
            "Requested audit fake_audit is not supported.",
        )

View File

@ -0,0 +1,239 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import http.client
import uuid
import mock
from oslo_config import cfg
from oslo_config import fixture as fixture_config
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
import pecan
from pecan.configuration import set_config
from pecan.testing import load_test_app
from dcagent.api import api_config
from dcagent.common import config
from dcagent.tests.base import DCAgentTestCase
from dcagent.tests.common import consts as test_consts
# Register the dcagent config options before any test app is loaded.
config.register_options()

# Import the keystonemiddleware auth_token option group so tests can
# override its options on cfg.CONF.
OPT_GROUP_NAME = "keystone_authtoken"
cfg.CONF.import_group(OPT_GROUP_NAME, "keystonemiddleware.auth_token")
class DCAgentApiTest(DCAgentTestCase):
    """Base class for dcagent API tests.

    Loads a pecan test application with authentication disabled and
    provides helpers to send requests and assert on responses and on
    pecan.abort calls.
    """

    def setUp(self):
        super().setUp()
        self.addCleanup(set_config, {}, overwrite=True)
        api_config.test_init()

        config_fixture = fixture_config.Config()
        self.CONF = self.useFixture(config_fixture).conf
        config_fixture.set_config_dirs([])

        # Tests exercise the app directly, so keystone auth is disabled.
        self.CONF.set_override("auth_strategy", "noauth")

        self.app = self._make_app()
        self._mock_pecan()

        # Defaults used by _send_request; individual tests override them.
        self.url = "/"
        # The put method is used as a default value, leading to the generic
        # implementation on controllers in case the method is not specified
        self.method = self.app.put
        self.params = {}
        self.upload_files = None
        self.verb = None
        self.headers = {
            "X-Tenant-Id": str(uuid.uuid4()),
            "X_ROLE": "admin,member,reader",
            "X-Identity-Status": "Confirmed",
            "X-Project-Name": "admin",
        }

    def _make_app(self, enable_acl=False):
        """Load the pecan test application.

        :param enable_acl: whether access control is enabled in the app
        """
        self.config_fixture = {
            "app": {
                "root": "dcagent.api.controllers.root.RootController",
                "modules": ["dcagent.api"],
                "enable_acl": enable_acl,
                "errors": {400: "/error", "__force_dict__": True},
            },
        }
        return load_test_app(self.config_fixture)

    def _send_request(self):
        """Send a request to a url"""
        kwargs = {}
        if self.upload_files:
            kwargs = {"upload_files": self.upload_files}
        return self.method(
            self.url,
            headers=self.headers,
            params=self.params,
            expect_errors=True,
            **kwargs,
        )

    def _assert_response(
        self,
        response,
        status_code=http.client.OK,
        content_type=test_consts.APPLICATION_JSON,
        expected_response_text=None,
    ):
        """Assert the response for a request"""
        self.assertEqual(response.status_code, status_code)
        self.assertEqual(response.content_type, content_type)
        if expected_response_text:
            self.assertEqual(response.text, expected_response_text)

    def _mock_pecan(self):
        """Mock pecan's abort"""
        # wraps=pecan.abort keeps the real abort behavior while recording
        # the calls for later assertions.
        mock_patch_object = mock.patch.object(pecan, "abort", wraps=pecan.abort)
        self.mock_pecan_abort = mock_patch_object.start()
        self.addCleanup(mock_patch_object.stop)

    def _assert_pecan(self, http_status, content=None, call_count=1):
        """Assert pecan was called with the correct arguments"""
        self.assertEqual(self.mock_pecan_abort.call_count, call_count)
        if content:
            self.mock_pecan_abort.assert_called_with(http_status, content)
        else:
            self.mock_pecan_abort.assert_called_with(http_status)

    def _assert_pecan_and_response(
        self,
        response,
        http_status,
        content=None,
        call_count=1,
        content_type=test_consts.TEXT_PLAIN,
    ):
        """Assert the response and pecan abort for a failed request"""
        self._assert_pecan(http_status, content, call_count=call_count)
        self._assert_response(response, http_status, content_type)

    def tearDown(self):
        # Zero-argument super() for consistency with setUp.
        super().tearDown()
        pecan.set_config({}, overwrite=True)
class TestRootController(DCAgentApiTest):
    """Test version listing on root URI."""

    def setUp(self):
        super().setUp()
        self.url = "/"
        self.method = self.app.get

    def _test_method_returns_405(self, method, content_type=test_consts.TEXT_PLAIN):
        """Send a request with the given method and expect 405."""
        self.method = method
        self._assert_pecan_and_response(
            self._send_request(),
            http.client.METHOD_NOT_ALLOWED,
            content_type=content_type,
        )

    def test_get(self):
        """Test get request succeeds with correct versions"""
        response = self._send_request()
        self._assert_response(response)
        payload = jsonutils.loads(response.body)
        self.assertEqual(1, len(payload.get("versions")))

    def test_request_id(self):
        """Test request for root returns the correct request id"""
        response = self._send_request()
        self._assert_response(response)
        self.assertIn("x-openstack-request-id", response.headers)
        request_id = response.headers["x-openstack-request-id"]
        self.assertTrue(request_id.startswith("req-"))
        # Everything after the "req-" prefix must be a valid UUID.
        self.assertTrue(uuidutils.is_uuid_like(request_id[len("req-"):]))

    def test_post(self):
        """Test post request is not allowed on root"""
        self._test_method_returns_405(self.app.post)

    def test_put(self):
        """Test put request is not allowed on root"""
        self._test_method_returns_405(self.app.put)

    def test_patch(self):
        """Test patch request is not allowed on root"""
        self._test_method_returns_405(self.app.patch)

    def test_delete(self):
        """Test delete request is not allowed on root"""
        self._test_method_returns_405(self.app.delete)

    def test_head(self):
        """Test head request is not allowed on root"""
        self._test_method_returns_405(self.app.head, content_type=test_consts.TEXT_HTML)
class TestErrors(DCAgentApiTest):
    """Tests for error responses on unknown URLs."""

    def setUp(self):
        super().setUp()
        cfg.CONF.set_override("admin_tenant", "fake_tenant_id", group="cache")

    def test_404(self):
        """An unknown path must return 404 with a plain-text body."""
        self.url = "/assert_called_once"
        self.method = self.app.get
        self._assert_response(
            self._send_request(),
            http.client.NOT_FOUND,
            content_type=test_consts.TEXT_PLAIN,
        )

    def test_version_1_root_controller(self):
        """An unknown v1 sub-path must abort with 404."""
        self.url = f"/v1/{uuidutils.generate_uuid()}/bad_method"
        self.method = self.app.patch
        response = self._send_request()
        self._assert_pecan_and_response(response, http.client.NOT_FOUND)
class TestKeystoneAuth(DCAgentApiTest):
    """Test requests using keystone as the authentication strategy"""

    def setUp(self):
        super().setUp()
        cfg.CONF.set_override("auth_strategy", "keystone")
        self.method = self.app.get

    def test_auth_not_enforced_for_root(self):
        """Test authentication is not enforced for root url"""
        self._assert_response(self._send_request())

View File

@ -0,0 +1,43 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import mock
from oslotest import base
class DCAgentTestCase(base.BaseTestCase):
    """Test case base class for all unit tests."""

    def _patch_and_cleanup(self, target, attribute):
        """Start a mock.patch.object and register its stop for cleanup."""
        patcher = mock.patch.object(target, attribute)
        mocked = patcher.start()
        self.addCleanup(patcher.stop)
        return mocked

    def _mock_sysinv_client(self, target):
        """Mock the target's CachedSysinvClient"""
        self.mock_sysinv_client = self._patch_and_cleanup(
            target, "CachedSysinvClient"
        )

    def _mock_fm_client(self, target):
        """Mock the target's CachedFmClient"""
        self.mock_fm_client = self._patch_and_cleanup(target, "CachedFmClient")

    def _mock_software_client(self, target):
        """Mock the target's CachedSoftwareClient"""
        self.mock_software_client = self._patch_and_cleanup(
            target, "CachedSoftwareClient"
        )

    def _mock_keystone_cache(self, target):
        """Mock the target's KeystoneCache"""
        self.mock_keystone_client = self._patch_and_cleanup(
            target, "KeystoneCache"
        )

View File

@ -0,0 +1,10 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Content-type header values used in API test response assertions
TEXT_PLAIN = "text/plain"
TEXT_HTML = "text/html"
APPLICATION_JSON = "application/json"

View File

@ -126,8 +126,22 @@ ENDPOINT_AUDIT_REQUESTS = {
ENDPOINT_TYPE_SOFTWARE: "spare_audit_requested",
}
# Audit names used as keys in the dcagent /v1/dcaudit request payload
BASE_AUDIT = "base_audit"
FIRMWARE_AUDIT = "firmware_audit"
KUBERNETES_AUDIT = "kubernetes_audit"
KUBE_ROOTCA_AUDIT = "kube_rootca_audit"
SOFTWARE_AUDIT = "software_audit"
# Value used to indicate an audit should be skipped
SKIP_AUDIT = "skip"

# Maps each dcagent audit name to its corresponding endpoint type
DCAGENT_ENDPOINT_TYPE_MAP = {
    FIRMWARE_AUDIT: ENDPOINT_TYPE_FIRMWARE,
    KUBERNETES_AUDIT: ENDPOINT_TYPE_KUBERNETES,
    KUBE_ROOTCA_AUDIT: ENDPOINT_TYPE_KUBE_ROOTCA,
    SOFTWARE_AUDIT: ENDPOINT_TYPE_SOFTWARE,
}

# Minimum release with dcagent support (presumably checked before
# auditing a subcloud through dcagent — verify against callers)
MIN_VERSION_FOR_DCAGENT = "24.09"

# Well known region names
SYSTEM_CONTROLLER_NAME = "SystemController"
DEFAULT_REGION_NAME = "RegionOne"

View File

@ -0,0 +1,53 @@
#
# Copyright (c) 2024 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from keystoneauth1.session import Session as keystone_session
from oslo_log import log
import requests
from dccommon import consts
from dccommon.drivers import base
LOG = log.getLogger(__name__)
DCAGENT_REST_DEFAULT_TIMEOUT = 900
class DcagentClient(base.DriverBase):
    """Dcagent V1 driver.

    Thin REST client for the dcagent audit API on a subcloud.
    """

    def __init__(
        self,
        region: str,
        session: keystone_session,
        endpoint: str = None,
    ):
        """Initialize the client.

        :param region: region name used to look up the dcagent endpoint
        :param session: keystone session for endpoint/token discovery
        :param endpoint: optional explicit endpoint URL; when not given it
            is discovered from the keystone service catalog
        """
        # Get an endpoint and token.
        if endpoint is None:
            self.endpoint = session.get_endpoint(
                service_type="dcagent",
                region_name=region,
                interface=consts.KS_ENDPOINT_ADMIN,
            )
        else:
            self.endpoint = endpoint
        self.token = session.get_token()

    def audit(self, audit_data, timeout=DCAGENT_REST_DEFAULT_TIMEOUT):
        """Audit subcloud.

        :param audit_data: payload sent to the /v1/dcaudit endpoint
        :param timeout: request timeout in seconds
        :return: decoded JSON response body on success
        :raises Exception: when the response status is not 200
        """
        url = self.endpoint + "/v1/dcaudit"
        headers = {"X-Auth-Token": self.token}
        response = requests.patch(
            url, headers=headers, json=audit_data, timeout=timeout
        )
        if response.status_code == 200:
            return response.json()
        # Include the response body so failures are diagnosable from the
        # raised exception and the logs, not just the status code.
        message = (
            f"Audit request failed with RC: {response.status_code}. "
            f"Response: {response.text}"
        )
        LOG.error(message)
        raise Exception(message)

View File

@ -44,6 +44,7 @@ LOG = logging.getLogger(__name__)
LOCK_NAME = "dc-keystone-endpoint-cache"
ENDPOINT_URLS = {
"dcagent": "https://{}:8326",
"fm": "https://{}:18003",
"keystone": "https://{}:5001/v3",
"patching": "https://{}:5492",

View File

@ -185,6 +185,7 @@ class EndpointCacheTest(base.DCCommonTestCase):
def _get_expected_endpoints(self, ip: str) -> dict:
ip_with_brackets = f"[{ip}]" if netaddr.IPAddress(ip).version == 6 else ip
return {
"dcagent": f"https://{ip_with_brackets}:8326",
"fm": f"https://{ip_with_brackets}:18003",
"keystone": f"https://{ip_with_brackets}:5001/v3",
"patching": f"https://{ip_with_brackets}:5492",

View File

@ -7,6 +7,7 @@ import sys
modules = [
"dccommon",
"dcdbsync",
"dcagent",
"dcorch",
"dcmanager/api",
"dcmanager/audit",
@ -22,6 +23,7 @@ formatted_modules = [
"dccommon",
"dcdbsync",
"dcorch",
"dcagent",
"dcmanager/api",
]

View File

@ -88,7 +88,7 @@ setenv =
{[testenv]setenv}
PYTHONPATH = {toxinidir}
commands =
pylint {posargs} dccommon dcdbsync dcmanager dcorch --rcfile=./.pylintrc
pylint {posargs} dccommon dcdbsync dcmanager dcorch dcagent --rcfile=./.pylintrc
[testenv:black]
# This environment checks and displays the recommended changes by Black for formatting