Apply black formatter to dcorch/api

This commit applies the Black formatter to the `dcorch/api`
files to ensure they adhere to the Black code style guidelines.

Test Plan:
PASS: Success in stx-distcloud-tox-black

Story: 2011149
Task: 50445

Change-Id: I9e5bd8ca9dd33a7cdd2ce36e8dcbd4be65c436d7
Signed-off-by: Hugo Brito <hugo.brito@windriver.com>
This commit is contained in:
Hugo Brito
2024-06-26 15:02:12 -03:00
committed by Hugo Nicodemos
parent 686abd1cbc
commit 56e359bf8a
21 changed files with 813 additions and 746 deletions

View File

@@ -1,4 +1,5 @@
# Copyright 2015 Huawei Technologies Co., Ltd. # Copyright 2015 Huawei Technologies Co., Ltd.
# Copyright (c) 2024 Wind River Systems, Inc.
# All Rights Reserved. # All Rights Reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,29 +31,36 @@ from dcorch.common import version
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
common_opts = [ common_opts = [
cfg.StrOpt('bind_host', default='0.0.0.0', cfg.StrOpt("bind_host", default="0.0.0.0", help=_("The host IP to bind to")),
help=_("The host IP to bind to")), cfg.IntOpt("bind_port", default=8118, help=_("The port to bind to")),
cfg.IntOpt('bind_port', default=8118, cfg.IntOpt("api_workers", default=2, help=_("number of api workers")),
help=_("The port to bind to")), cfg.StrOpt(
cfg.IntOpt('api_workers', default=2, "state_path",
help=_("number of api workers")), default=os.path.join(os.path.dirname(__file__), "../"),
cfg.StrOpt('state_path', help="Top-level directory for maintaining dcorch state",
default=os.path.join(os.path.dirname(__file__), '../'), ),
help='Top-level directory for maintaining dcorch state'), cfg.StrOpt(
cfg.StrOpt('api_extensions_path', default="", "api_extensions_path", default="", help=_("The path for API extensions")
help=_("The path for API extensions")), ),
cfg.StrOpt('auth_strategy', default='keystone', cfg.StrOpt(
help=_("The type of authentication to use")), "auth_strategy", default="keystone", help=_("The type of authentication to use")
cfg.BoolOpt('allow_bulk', default=True, ),
help=_("Allow the usage of the bulk API")), cfg.BoolOpt("allow_bulk", default=True, help=_("Allow the usage of the bulk API")),
cfg.BoolOpt('allow_pagination', default=False, cfg.BoolOpt(
help=_("Allow the usage of the pagination")), "allow_pagination", default=False, help=_("Allow the usage of the pagination")
cfg.BoolOpt('allow_sorting', default=False, ),
help=_("Allow the usage of the sorting")), cfg.BoolOpt(
cfg.StrOpt('pagination_max_limit', default="-1", "allow_sorting", default=False, help=_("Allow the usage of the sorting")
help=_("The maximum number of items returned in a single " ),
"response, value was 'infinite' or negative integer " cfg.StrOpt(
"means no limit")), "pagination_max_limit",
default="-1",
help=_(
"The maximum number of items returned in a single "
"response, value was 'infinite' or negative integer "
"means no limit"
),
),
] ]
@@ -64,9 +72,12 @@ def init(args, **kwargs):
# auth.register_conf_options(cfg.CONF) # auth.register_conf_options(cfg.CONF)
logging.register_options(cfg.CONF) logging.register_options(cfg.CONF)
cfg.CONF(args=args, project='dcorch', cfg.CONF(
version='%%(prog)s %s' % version.version_info.release_string(), args=args,
**kwargs) project="dcorch",
version="%%(prog)s %s" % version.version_info.release_string(),
**kwargs
)
def setup_logging(): def setup_logging():
@@ -74,9 +85,10 @@ def setup_logging():
product_name = "dcorch" product_name = "dcorch"
logging.setup(cfg.CONF, product_name) logging.setup(cfg.CONF, product_name)
LOG.info("Logging enabled!") LOG.info("Logging enabled!")
LOG.info("%(prog)s version %(version)s", LOG.info(
{'prog': sys.argv[0], "%(prog)s version %(version)s",
'version': version.version_info.release_string()}) {"prog": sys.argv[0], "version": version.version_info.release_string()},
)
LOG.debug("command line: %s", " ".join(sys.argv)) LOG.debug("command line: %s", " ".join(sys.argv))

View File

@@ -30,20 +30,14 @@ def setup_app(*args, **kwargs):
opts = cfg.CONF.pecan opts = cfg.CONF.pecan
config = { config = {
'server': { "server": {"port": cfg.CONF.bind_port, "host": cfg.CONF.bind_host},
'port': cfg.CONF.bind_port, "app": {
'host': cfg.CONF.bind_host "root": "dcorch.api.controllers.root.RootController",
}, "modules": ["dcorch.api"],
'app': {
'root': 'dcorch.api.controllers.root.RootController',
'modules': ['dcorch.api'],
"debug": opts.debug, "debug": opts.debug,
"auth_enable": opts.auth_enable, "auth_enable": opts.auth_enable,
'errors': { "errors": {400: "/error", "__force_dict__": True},
400: '/error', },
'__force_dict__': True
}
}
} }
pecan_config = pecan.configuration.conf_from_dict(config) pecan_config = pecan.configuration.conf_from_dict(config)
@@ -56,7 +50,7 @@ def setup_app(*args, **kwargs):
wrap_app=_wrap_app, wrap_app=_wrap_app,
force_canonical=False, force_canonical=False,
hooks=lambda: [ctx.AuthHook()], hooks=lambda: [ctx.AuthHook()],
guess_content_type_from_ext=True guess_content_type_from_ext=True,
) )
return app return app
@@ -74,10 +68,10 @@ def load_paste_app(app_name=None):
def _wrap_app(app): def _wrap_app(app):
app = request_id.RequestId(app) app = request_id.RequestId(app)
if cfg.CONF.pecan.auth_enable and cfg.CONF.auth_strategy == 'keystone': if cfg.CONF.pecan.auth_enable and cfg.CONF.auth_strategy == "keystone":
conf = dict(cfg.CONF.keystone_authtoken) conf = dict(cfg.CONF.keystone_authtoken)
# Change auth decisions of requests to the app itself. # Change auth decisions of requests to the app itself.
conf.update({'delay_auth_decision': True}) conf.update({"delay_auth_decision": True})
# NOTE: Policy enforcement works only if Keystone # NOTE: Policy enforcement works only if Keystone
# authentication is enabled. No support for other authentication # authentication is enabled. No support for other authentication
@@ -93,7 +87,7 @@ _launcher = None
def serve(api_service, conf, workers=1): def serve(api_service, conf, workers=1):
global _launcher global _launcher
if _launcher: if _launcher:
raise RuntimeError(_('serve() can only be called once')) raise RuntimeError(_("serve() can only be called once"))
_launcher = service.launch(conf, api_service, workers=workers) _launcher = service.launch(conf, api_service, workers=workers)

View File

@@ -1,5 +1,5 @@
# Copyright (c) 2015 Huawei Tech. Co., Ltd. # Copyright (c) 2015 Huawei Tech. Co., Ltd.
# Copyright (c) 2020-2022 Wind River Systems, Inc. # Copyright (c) 2020-2022, 2024 Wind River Systems, Inc.
# All Rights Reserved. # All Rights Reserved.
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,24 +21,26 @@ import dcorch.common.context as k_context
def extract_context_from_environ(): def extract_context_from_environ():
context_paras = {'auth_token': 'HTTP_X_AUTH_TOKEN', context_paras = {
'user': 'HTTP_X_USER_ID', "auth_token": "HTTP_X_AUTH_TOKEN",
'project': 'HTTP_X_TENANT_ID', "user": "HTTP_X_USER_ID",
'user_name': 'HTTP_X_USER_NAME', "project": "HTTP_X_TENANT_ID",
'tenant_name': 'HTTP_X_PROJECT_NAME', "user_name": "HTTP_X_USER_NAME",
'domain': 'HTTP_X_DOMAIN_ID', "tenant_name": "HTTP_X_PROJECT_NAME",
'roles': 'HTTP_X_ROLE', "domain": "HTTP_X_DOMAIN_ID",
'user_domain': 'HTTP_X_USER_DOMAIN_ID', "roles": "HTTP_X_ROLE",
'project_domain': 'HTTP_X_PROJECT_DOMAIN_ID', "user_domain": "HTTP_X_USER_DOMAIN_ID",
'request_id': 'openstack.request_id'} "project_domain": "HTTP_X_PROJECT_DOMAIN_ID",
"request_id": "openstack.request_id",
}
environ = request.environ environ = request.environ
for key, val in context_paras.items(): for key, val in context_paras.items():
context_paras[key] = environ.get(val) context_paras[key] = environ.get(val)
role = environ.get('HTTP_X_ROLE') role = environ.get("HTTP_X_ROLE")
# context_paras['is_admin'] = role == 'admin' # context_paras['is_admin'] = role == 'admin'
# In order to work in TiC # In order to work in TiC
context_paras['is_admin'] = 'admin' in role.split(',') context_paras["is_admin"] = "admin" in role.split(",")
return k_context.RequestContext(**context_paras) return k_context.RequestContext(**context_paras)

View File

@@ -22,16 +22,16 @@ from dcorch.api.controllers.v1 import root as v1_root
class RootController(object): class RootController(object):
@pecan.expose('json') @pecan.expose("json")
def _lookup(self, version, *remainder): def _lookup(self, version, *remainder):
version = str(version) version = str(version)
minor_version = version[-1] minor_version = version[-1]
major_version = version[1] major_version = version[1]
remainder = remainder + (minor_version,) remainder = remainder + (minor_version,)
if major_version == '1': if major_version == "1":
return v1_root.Controller(), remainder return v1_root.Controller(), remainder
@pecan.expose(generic=True, template='json') @pecan.expose(generic=True, template="json")
def index(self): def index(self):
return { return {
"versions": [ "versions": [
@@ -40,19 +40,19 @@ class RootController(object):
"links": [ "links": [
{ {
"rel": "self", "rel": "self",
"href": pecan.request.application_url + "/v1.0/" "href": pecan.request.application_url + "/v1.0/",
} }
], ],
"id": "v1.0", "id": "v1.0",
"updated": "2016-03-07" "updated": "2016-03-07",
} }
] ]
} }
@index.when(method='POST') @index.when(method="POST")
@index.when(method='PUT') @index.when(method="PUT")
@index.when(method='DELETE') @index.when(method="DELETE")
@index.when(method='HEAD') @index.when(method="HEAD")
@index.when(method='PATCH') @index.when(method="PATCH")
def not_supported(self): def not_supported(self):
pecan.abort(405) pecan.abort(405)

View File

@@ -28,9 +28,8 @@ class Controller(object):
minor_version = remainder[-1] minor_version = remainder[-1]
remainder = remainder[:-1] remainder = remainder[:-1]
sub_controllers = dict() sub_controllers = dict()
if minor_version == '0': if minor_version == "0":
sub_controllers["subclouds"] = subcloud_manager.\ sub_controllers["subclouds"] = subcloud_manager.SubcloudController
SubcloudController
for name, ctrl in sub_controllers.items(): for name, ctrl in sub_controllers.items():
setattr(self, name, ctrl) setattr(self, name, ctrl)

View File

@@ -32,7 +32,7 @@ LOG = logging.getLogger(__name__)
class SubcloudController(object): class SubcloudController(object):
VERSION_ALIASES = { VERSION_ALIASES = {
'Newton': '1.0', "Newton": "1.0",
} }
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
@@ -44,26 +44,24 @@ class SubcloudController(object):
version_cap = 1.0 version_cap = 1.0
return version_cap return version_cap
@expose(generic=True, template='json') @expose(generic=True, template="json")
def index(self): def index(self):
# Route the request to specific methods with parameters # Route the request to specific methods with parameters
pass pass
@index.when(method='POST', template='json') @index.when(method="POST", template="json")
def post(self, project): def post(self, project):
"""Sync resources present in one region to another region. """Sync resources present in one region to another region."""
"""
context = restcomm.extract_context_from_environ() context = restcomm.extract_context_from_environ()
payload = eval(request.body) payload = eval(request.body)
if not payload: if not payload:
pecan.abort(400, _('Body required')) pecan.abort(400, _("Body required"))
if not payload.get('subcloud'): if not payload.get("subcloud"):
pecan.abort(400, _('subcloud required')) pecan.abort(400, _("subcloud required"))
job_id = uuidutils.generate_uuid() job_id = uuidutils.generate_uuid()
return self._add_subcloud(job_id, payload, context) return self._add_subcloud(job_id, payload, context)
@index.when(method='delete', template='json') @index.when(method="delete", template="json")
def delete(self, project, subcloud): def delete(self, project, subcloud):
"""Delete the database entries of a given job_id. """Delete the database entries of a given job_id.
@@ -74,10 +72,10 @@ class SubcloudController(object):
context = restcomm.extract_context_from_environ() context = restcomm.extract_context_from_environ()
try: try:
self.rpc_client.del_subcloud(context, subcloud) self.rpc_client.del_subcloud(context, subcloud)
return {'deleted': {'subcloud': subcloud}} return {"deleted": {"subcloud": subcloud}}
except oslo_messaging.RemoteError as ex: except oslo_messaging.RemoteError as ex:
if ex.exc_type == 'SubcloudNotFound': if ex.exc_type == "SubcloudNotFound":
pecan.abort(404, _('Subcloud not found')) pecan.abort(404, _("Subcloud not found"))
def _add_subcloud(self, job_id, payload, context): def _add_subcloud(self, job_id, payload, context):
"""Make an rpc call to engine. """Make an rpc call to engine.
@@ -88,8 +86,8 @@ class SubcloudController(object):
:param context: context of the request. :param context: context of the request.
:param result: Result object to return an output. :param result: Result object to return an output.
""" """
name = payload['subcloud'] name = payload["subcloud"]
management_ip = payload['management_ip'] management_ip = payload["management_ip"]
version = '17.06' version = "17.06"
self.rpc_client.add_subcloud(context, name, version, management_ip) self.rpc_client.add_subcloud(context, name, version, management_ip)
return {'added': {'subcloud': name}} return {"added": {"subcloud": name}}

View File

@@ -1,5 +1,5 @@
# #
# Copyright (c) 2022 Wind River Systems, Inc. # Copyright (c) 2022, 2024 Wind River Systems, Inc.
# #
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
@@ -10,6 +10,4 @@ from dcorch.api.policies import base
def list_rules(): def list_rules():
return itertools.chain( return itertools.chain(base.list_rules())
base.list_rules()
)

View File

@@ -1,28 +1,26 @@
# #
# Copyright (c) 2022 Wind River Systems, Inc. # Copyright (c) 2022, 2024 Wind River Systems, Inc.
# #
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #
from oslo_policy import policy from oslo_policy import policy
ADMIN_IN_SYSTEM_PROJECTS = 'admin_in_system_projects' ADMIN_IN_SYSTEM_PROJECTS = "admin_in_system_projects"
READER_IN_SYSTEM_PROJECTS = 'reader_in_system_projects' READER_IN_SYSTEM_PROJECTS = "reader_in_system_projects"
base_rules = [ base_rules = [
policy.RuleDefault( policy.RuleDefault(
name=ADMIN_IN_SYSTEM_PROJECTS, name=ADMIN_IN_SYSTEM_PROJECTS,
check_str='role:admin and (project_name:admin or ' + check_str="role:admin and (project_name:admin or " + "project_name:services)",
'project_name:services)',
description="Base rule.", description="Base rule.",
), ),
policy.RuleDefault( policy.RuleDefault(
name=READER_IN_SYSTEM_PROJECTS, name=READER_IN_SYSTEM_PROJECTS,
check_str='role:reader and (project_name:admin or ' + check_str="role:reader and (project_name:admin or " + "project_name:services)",
'project_name:services)', description="Base rule.",
description="Base rule." ),
)
] ]

View File

@@ -36,22 +36,24 @@ def reset():
_ENFORCER = None _ENFORCER = None
def init(policy_file='policy.yaml'): def init(policy_file="policy.yaml"):
"""Init an Enforcer class. """Init an Enforcer class.
:param policy_file: Custom policy file to be used. :param policy_file: Custom policy file to be used.
:return: Returns a Enforcer instance. :return: Returns a Enforcer instance.
""" """
global _ENFORCER global _ENFORCER
if not _ENFORCER: if not _ENFORCER:
# https://docs.openstack.org/oslo.policy/latest/user/usage.html # https://docs.openstack.org/oslo.policy/latest/user/usage.html
_ENFORCER = policy.Enforcer(CONF, _ENFORCER = policy.Enforcer(
policy_file=policy_file, CONF,
default_rule='default', policy_file=policy_file,
use_conf=True, default_rule="default",
overwrite=True) use_conf=True,
overwrite=True,
)
_ENFORCER.register_defaults(controller_policies.list_rules()) _ENFORCER.register_defaults(controller_policies.list_rules())
return _ENFORCER return _ENFORCER
@@ -59,5 +61,6 @@ def init(policy_file='policy.yaml'):
def authorize(rule, target, creds, do_raise=True): def authorize(rule, target, creds, do_raise=True):
"""A wrapper around 'authorize' from 'oslo_policy.policy'.""" """A wrapper around 'authorize' from 'oslo_policy.policy'."""
init() init()
return _ENFORCER.authorize(rule, target, creds, do_raise=do_raise, return _ENFORCER.authorize(
exc=exc.HTTPForbidden) rule, target, creds, do_raise=do_raise, exc=exc.HTTPForbidden
)

View File

@@ -74,13 +74,16 @@ class Acceptor(Router):
for key, value in proxy_consts.COMPUTE_PATH_MAP.items(): for key, value in proxy_consts.COMPUTE_PATH_MAP.items():
for k, v in value.items(): for k, v in value.items():
self._add_resource(mapper, api_controller, v, k, self._add_resource(mapper, api_controller, v, k, CONF.type, key)
CONF.type, key)
self._add_resource(mapper, orch_controller, self._add_resource(
proxy_consts.QUOTA_DETAIL_PATHS, mapper,
consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET, orch_controller,
CONF.type, method=['GET']) proxy_consts.QUOTA_DETAIL_PATHS,
consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET,
CONF.type,
method=["GET"],
)
def add_platform_routes(self, app, conf, mapper): def add_platform_routes(self, app, conf, mapper):
api_controller = SysinvAPIController(app, conf) api_controller = SysinvAPIController(app, conf)
@@ -93,8 +96,7 @@ class Acceptor(Router):
for key, value in proxy_consts.CINDER_PATH_MAP.items(): for key, value in proxy_consts.CINDER_PATH_MAP.items():
for k, v in value.items(): for k, v in value.items():
self._add_resource(mapper, api_controller, v, k, self._add_resource(mapper, api_controller, v, k, CONF.type, key)
CONF.type, key)
def add_network_routes(self, app, conf, mapper): def add_network_routes(self, app, conf, mapper):
api_controller = NeutronAPIController(app, conf) api_controller = NeutronAPIController(app, conf)
@@ -103,10 +105,14 @@ class Acceptor(Router):
for key, value in proxy_consts.NEUTRON_PATH_MAP.items(): for key, value in proxy_consts.NEUTRON_PATH_MAP.items():
self._add_resource(mapper, api_controller, value, key, CONF.type) self._add_resource(mapper, api_controller, value, key, CONF.type)
self._add_resource(mapper, orch_controller, self._add_resource(
proxy_consts.NEUTRON_QUOTA_DETAIL_PATHS, mapper,
consts.RESOURCE_TYPE_NETWORK_QUOTA_SET, orch_controller,
CONF.type, method=['GET']) proxy_consts.NEUTRON_QUOTA_DETAIL_PATHS,
consts.RESOURCE_TYPE_NETWORK_QUOTA_SET,
CONF.type,
method=["GET"],
)
def add_patch_routes(self, app, conf, mapper): def add_patch_routes(self, app, conf, mapper):
api_controller = PatchAPIController(app, conf) api_controller = PatchAPIController(app, conf)
@@ -127,6 +133,9 @@ class VersionAcceptor(Router):
self._conf = conf self._conf = conf
mapper = routes.Mapper() mapper = routes.Mapper()
api_controller = VersionController(app, conf) api_controller = VersionController(app, conf)
mapper.connect(proxy_consts.VERSION_ROOT, controller=api_controller, mapper.connect(
conditions=dict(method=['GET'])) proxy_consts.VERSION_ROOT,
controller=api_controller,
conditions=dict(method=["GET"]),
)
super(VersionAcceptor, self).__init__(app, conf, mapper, app) super(VersionAcceptor, self).__init__(app, conf, mapper, app)

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
# Copyright 2017 Wind River # Copyright 2017, 2024 Wind River
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@@ -24,12 +24,12 @@ from dcorch.api.proxy.common import utils
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
dispatch_opts = [ dispatch_opts = [
cfg.StrOpt('remote_host', cfg.StrOpt(
default="192.168.204.2", "remote_host",
help='remote host for api proxy to forward the request'), default="192.168.204.2",
cfg.IntOpt('remote_port', help="remote host for api proxy to forward the request",
default=18774, ),
help='listen port for remote host'), cfg.IntOpt("remote_port", default=18774, help="listen port for remote host"),
] ]
CONF = cfg.CONF CONF = cfg.CONF
@@ -43,15 +43,15 @@ class APIDispatcher(object):
""" """
def __init__(self, app): def __init__(self, app):
self._remote_host, self._remote_port = \ self._remote_host, self._remote_port = utils.get_remote_host_port_options(CONF)
utils.get_remote_host_port_options(CONF)
self.app = app self.app = app
@webob.dec.wsgify @webob.dec.wsgify
def __call__(self, req): def __call__(self, req):
"""Route the incoming request to a remote host""" """Route the incoming request to a remote host"""
LOG.debug("APIDispatcher dispatch the request to remote host: (%s), " LOG.debug(
"port: (%d)" % (self._remote_host, self._remote_port)) "APIDispatcher dispatch the request to remote host: (%s), "
utils.set_request_forward_environ(req, self._remote_host, "port: (%d)" % (self._remote_host, self._remote_port)
self._remote_port) )
utils.set_request_forward_environ(req, self._remote_host, self._remote_port)
return self.app return self.app

View File

@@ -29,9 +29,11 @@ from dcorch.api.proxy.common import utils
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
filter_opts = [ filter_opts = [
cfg.StrOpt('user_header', cfg.StrOpt(
default=dccommon_consts.USER_HEADER_VALUE, "user_header",
help='An application specific header'), default=dccommon_consts.USER_HEADER_VALUE,
help="An application specific header",
),
] ]
CONF = cfg.CONF CONF = cfg.CONF
@@ -40,8 +42,7 @@ CONF.register_opts(filter_opts)
def is_load_import(content_type, url_path): def is_load_import(content_type, url_path):
if (content_type == "multipart/form-data" and if content_type == "multipart/form-data" and url_path == "/v1/loads/import_load":
url_path == "/v1/loads/import_load"):
return True return True
else: else:
return False return False
@@ -61,8 +62,7 @@ class ApiFiller(Middleware):
def __init__(self, app, conf): def __init__(self, app, conf):
self._default_dispatcher = Proxy() self._default_dispatcher = Proxy()
self._remote_host, self._remote_port = \ self._remote_host, self._remote_port = utils.get_remote_host_port_options(CONF)
utils.get_remote_host_port_options(CONF)
super(ApiFiller, self).__init__(app) super(ApiFiller, self).__init__(app)
@webob.dec.wsgify(RequestClass=Request) @webob.dec.wsgify(RequestClass=Request)
@@ -72,8 +72,7 @@ class ApiFiller(Middleware):
# 3 times the file size is needed: # 3 times the file size is needed:
# 2 times on webob temporary copies # 2 times on webob temporary copies
# 1 time on internal temporary copy to be shared with sysinv # 1 time on internal temporary copy to be shared with sysinv
if not utils.is_space_available("/scratch", if not utils.is_space_available("/scratch", 3 * req.content_length):
3 * req.content_length):
msg = _( msg = _(
"Insufficient space on /scratch for request %s, " "Insufficient space on /scratch for request %s, "
"/scratch must have at least %d bytes of free space. " "/scratch must have at least %d bytes of free space. "
@@ -84,10 +83,11 @@ class ApiFiller(Middleware):
raise webob.exc.HTTPInternalServerError(explanation=msg) raise webob.exc.HTTPInternalServerError(explanation=msg)
if ('HTTP_USER_HEADER' in req.environ and if (
req.environ['HTTP_USER_HEADER'] == CONF.user_header): "HTTP_USER_HEADER" in req.environ
utils.set_request_forward_environ(req, self._remote_host, and req.environ["HTTP_USER_HEADER"] == CONF.user_header
self._remote_port) ):
utils.set_request_forward_environ(req, self._remote_host, self._remote_port)
LOG.debug("Forward dcorch-engine request to the API service") LOG.debug("Forward dcorch-engine request to the API service")
return self._default_dispatcher return self._default_dispatcher
else: else:

View File

@@ -48,66 +48,75 @@ class ParseError(Middleware):
state = {} state = {}
def replacement_start_response(status, headers, exc_info=None): def replacement_start_response(status, headers, exc_info=None):
"""Overrides the default response to make errors parsable. """Overrides the default response to make errors parsable."""
"""
try: try:
status_code = int(status.split(' ')[0]) status_code = int(status.split(" ")[0])
state['status_code'] = status_code state["status_code"] = status_code
except (ValueError, TypeError): # pragma: nocover except (ValueError, TypeError): # pragma: nocover
raise Exception(( raise Exception(
'ErrorDocumentMiddleware received an invalid ' ("ErrorDocumentMiddleware received an invalid status %s" % status)
'status %s' % status )
))
else: else:
if (state['status_code'] // 100) not in (2, 3): if (state["status_code"] // 100) not in (2, 3):
# Remove some headers so we can replace them later # Remove some headers so we can replace them later
# when we have the full error message and can # when we have the full error message and can
# compute the length. # compute the length.
headers = [(h, v) headers = [
for (h, v) in headers (h, v)
if h not in ('Content-Length', 'Content-Type') for (h, v) in headers
] if h not in ("Content-Length", "Content-Type")
]
# Save the headers in case we need to modify them. # Save the headers in case we need to modify them.
state['headers'] = headers state["headers"] = headers
return start_response(status, headers, exc_info) return start_response(status, headers, exc_info)
app_iter = self.app(environ, replacement_start_response) app_iter = self.app(environ, replacement_start_response)
if (state['status_code'] // 100) not in (2, 3): if (state["status_code"] // 100) not in (2, 3):
req = webob.Request(environ) req = webob.Request(environ)
if (req.accept.best_match(['application/json', 'application/xml']) == if (
'application/xml'): req.accept.best_match(["application/json", "application/xml"])
== "application/xml"
):
try: try:
# simple check xml is valid # simple check xml is valid
body = [et.ElementTree.tostring( body = [
et.ElementTree.fromstring('<error_message>' + et.ElementTree.tostring(
'\n'.join(app_iter) + et.ElementTree.fromstring(
'</error_message>'))] "<error_message>"
+ "\n".join(app_iter)
+ "</error_message>"
)
)
]
except et.ElementTree.ParseError as err: except et.ElementTree.ParseError as err:
LOG.error('Error parsing HTTP response: %s' % err) LOG.error("Error parsing HTTP response: %s" % err)
body = ['<error_message>%s' % state['status_code'] + body = [
'</error_message>'] "<error_message>%s" % state["status_code"] + "</error_message>"
state['headers'].append(('Content-Type', 'application/xml')) ]
state["headers"].append(("Content-Type", "application/xml"))
else: else:
app_iter = [i.decode('utf-8') for i in app_iter] app_iter = [i.decode("utf-8") for i in app_iter]
# Parse explanation field from webob.exc and add it as # Parse explanation field from webob.exc and add it as
# 'faulstring' to be processed by cgts-client # 'faulstring' to be processed by cgts-client
fault = None fault = None
app_data = '\n'.join(app_iter) app_data = "\n".join(app_iter)
for data in app_data.split("\n"): for data in app_data.split("\n"):
if WEBOB_EXPL_SEP in str(data): if WEBOB_EXPL_SEP in str(data):
# Remove separator, trailing and leading white spaces # Remove separator, trailing and leading white spaces
fault = str(data).replace(WEBOB_EXPL_SEP, "").strip() fault = str(data).replace(WEBOB_EXPL_SEP, "").strip()
break break
if fault is None: if fault is None:
body = [json.dumps({'error_message': app_data})] body = [json.dumps({"error_message": app_data})]
else: else:
body = [json.dumps({'error_message': body = [
json.dumps({'faultstring': fault})})] json.dumps(
body = [item.encode('utf-8') for item in body] {"error_message": json.dumps({"faultstring": fault})}
state['headers'].append(('Content-Type', 'application/json')) )
state['headers'].append(('Content-Length', str(len(body[0])))) ]
body = [item.encode("utf-8") for item in body]
state["headers"].append(("Content-Type", "application/json"))
state["headers"].append(("Content-Length", str(len(body[0]))))
else: else:
body = app_iter body = app_iter
return body return body

View File

@@ -41,9 +41,11 @@ LOG = logging.getLogger(__name__)
CONF = cfg.CONF CONF = cfg.CONF
patch_opts = [ patch_opts = [
cfg.StrOpt('patch_vault', cfg.StrOpt(
default='/opt/dc-vault/patches/', "patch_vault",
help='file system for patch storage on SystemController'), default="/opt/dc-vault/patches/",
help="file system for patch storage on SystemController",
),
] ]
CONF.register_opts(patch_opts, CONF.type) CONF.register_opts(patch_opts, CONF.type)
@@ -58,8 +60,8 @@ class PatchAPIController(Middleware):
webob.exc.HTTPOk.code, webob.exc.HTTPOk.code,
] ]
PATCH_META_DATA = 'metadata.xml' PATCH_META_DATA = "metadata.xml"
SOFTWARE_VERSION = 'sw_version' SOFTWARE_VERSION = "sw_version"
def __init__(self, app, conf): def __init__(self, app, conf):
super(PatchAPIController, self).__init__(app) super(PatchAPIController, self).__init__(app)
@@ -89,7 +91,7 @@ class PatchAPIController(Middleware):
# check if the request was successful # check if the request was successful
if response.status_int in self.OK_STATUS_CODE: if response.status_int in self.OK_STATUS_CODE:
data = json.loads(response.text) data = json.loads(response.text)
if 'error' in data and data["error"] != "": if "error" in data and data["error"] != "":
rc = False rc = False
else: else:
rc = False rc = False
@@ -103,8 +105,7 @@ class PatchAPIController(Middleware):
msg = "Unable to fetch release version from patch" msg = "Unable to fetch release version from patch"
LOG.error(msg) LOG.error(msg)
raise webob.exc.HTTPUnprocessableEntity(explanation=msg) raise webob.exc.HTTPUnprocessableEntity(explanation=msg)
versioned_vault = CONF.patching.patch_vault + \ versioned_vault = CONF.patching.patch_vault + sw_version
sw_version
if not os.path.isdir(versioned_vault): if not os.path.isdir(versioned_vault):
os.makedirs(versioned_vault) os.makedirs(versioned_vault)
try: try:
@@ -125,8 +126,9 @@ class PatchAPIController(Middleware):
os.remove(fn) os.remove(fn)
return return
except OSError: except OSError:
msg = (f"Unable to remove patch file {fn} from the central " msg = (
"storage.") f"Unable to remove patch file {fn} from the central " "storage."
)
raise webob.exc.HTTPUnprocessableEntity(explanation=msg) raise webob.exc.HTTPUnprocessableEntity(explanation=msg)
LOG.info(f"Patch {patch} was not found in {vault}") LOG.info(f"Patch {patch} was not found in {vault}")
@@ -136,8 +138,8 @@ class PatchAPIController(Middleware):
# chunk, rather than reading the file into memory as a whole # chunk, rather than reading the file into memory as a whole
# write the patch to a temporary directory first # write the patch to a temporary directory first
tempdir = tempfile.mkdtemp(prefix="patch_proxy_", dir='/scratch') tempdir = tempfile.mkdtemp(prefix="patch_proxy_", dir="/scratch")
fn = tempdir + '/' + os.path.basename(filename) fn = tempdir + "/" + os.path.basename(filename)
dst = os.open(fn, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) dst = os.open(fn, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
size = 64 * 1024 size = 64 * 1024
n = size n = size
@@ -154,19 +156,22 @@ class PatchAPIController(Middleware):
def patch_upload_req(self, request, response): def patch_upload_req(self, request, response):
# stores patch in the patch storage # stores patch in the patch storage
file_item = request.POST['file'] file_item = request.POST["file"]
try: try:
self.store_patch_file(file_item.filename, file_item.file.fileno()) self.store_patch_file(file_item.filename, file_item.file.fileno())
except Exception: except Exception:
LOG.exception("Failed to store the patch to vault") LOG.exception("Failed to store the patch to vault")
# return a warning and prompt the user to try again # return a warning and prompt the user to try again
if hasattr(response, 'text'): if hasattr(response, "text"):
from builtins import str as text from builtins import str as text
data = json.loads(response.text) data = json.loads(response.text)
if 'warning' in data: if "warning" in data:
msg = _('The patch file could not be stored in the vault, ' msg = _(
'please upload the patch again!') "The patch file could not be stored in the vault, "
data['warning'] += msg "please upload the patch again!"
)
data["warning"] += msg
response.text = text(json.dumps(data)) response.text = text(json.dumps(data))
proxy_utils.cleanup(request.environ) proxy_utils.cleanup(request.environ)
return response return response
@@ -175,7 +180,7 @@ class PatchAPIController(Middleware):
files = [] files = []
for key, path in request.GET.items(): for key, path in request.GET.items():
LOG.info("upload-dir: Retrieving patches from %s" % path) LOG.info("upload-dir: Retrieving patches from %s" % path)
for f in glob.glob(path + '/*.patch'): for f in glob.glob(path + "/*.patch"):
if os.path.isfile(f): if os.path.isfile(f):
files.append(f) files.append(f)
@@ -190,7 +195,8 @@ class PatchAPIController(Middleware):
self.dcmanager_state_rpc_client.update_subcloud_endpoint_status( self.dcmanager_state_rpc_client.update_subcloud_endpoint_status(
self.ctxt, self.ctxt,
endpoint_type=self.ENDPOINT_TYPE, endpoint_type=self.ENDPOINT_TYPE,
sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN) sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN,
)
return response return response
def notify_usm(self, request, response): def notify_usm(self, request, response):
@@ -199,18 +205,19 @@ class PatchAPIController(Middleware):
self.dcmanager_state_rpc_client.update_subcloud_endpoint_status( self.dcmanager_state_rpc_client.update_subcloud_endpoint_status(
self.ctxt, self.ctxt,
endpoint_type=self.USM_ENDPOINT_TYPE, endpoint_type=self.USM_ENDPOINT_TYPE,
sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN) sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN,
)
return response return response
def patch_delete_req(self, request, response): def patch_delete_req(self, request, response):
patch_ids = proxy_utils.get_routing_match_value(request.environ, patch_ids = proxy_utils.get_routing_match_value(request.environ, "patch_id")
'patch_id')
LOG.info("Deleting patches: %s", patch_ids) LOG.info("Deleting patches: %s", patch_ids)
patch_list = os.path.normpath(patch_ids).split(os.path.sep) patch_list = os.path.normpath(patch_ids).split(os.path.sep)
for patch_file in patch_list: for patch_file in patch_list:
LOG.debug("Patch file:(%s)", patch_file) LOG.debug("Patch file:(%s)", patch_file)
self.delete_patch_from_version_vault(os.path.basename(patch_file) self.delete_patch_from_version_vault(
+ '.patch') os.path.basename(patch_file) + ".patch"
)
return response return response
def process_request(self, req): def process_request(self, req):
@@ -222,7 +229,7 @@ class PatchAPIController(Middleware):
if CONF.show_response: if CONF.show_response:
LOG.info("Response: (%s)", str(response)) LOG.info("Response: (%s)", str(response))
LOG.info("Response status: (%s)", response.status) LOG.info("Response status: (%s)", response.status)
action = proxy_utils.get_routing_match_value(request.environ, 'action') action = proxy_utils.get_routing_match_value(request.environ, "action")
if self.ok_response(response) and action in self.response_hander_map: if self.ok_response(response) and action in self.response_hander_map:
handler = self.response_hander_map[action] handler = self.response_hander_map[action]
return handler(request, response) return handler(request, response)

View File

@@ -21,7 +21,7 @@ from dcorch.api.proxy.common.service import Application
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
HEADERS = ['HTTP_X_DOMAIN_ID', 'HTTP_X_DOMAIN_NAME', 'HTTP_OPENSTACK_SYSTEM_SCOPE'] HEADERS = ["HTTP_X_DOMAIN_ID", "HTTP_X_DOMAIN_NAME", "HTTP_OPENSTACK_SYSTEM_SCOPE"]
class Proxy(Application): class Proxy(Application):
@@ -34,8 +34,7 @@ class Proxy(Application):
self.proxy_app = TransparentProxy() self.proxy_app = TransparentProxy()
def __call__(self, environ, start_response): def __call__(self, environ, start_response):
LOG.debug("Proxy the request to the remote host: (%s)", environ[ LOG.debug("Proxy the request to the remote host: (%s)", environ["HTTP_HOST"])
'HTTP_HOST'])
# The http/client.py added validation for illegal headers in python3 # The http/client.py added validation for illegal headers in python3
# which doesn't allow None values. If we don't inject these headers # which doesn't allow None values. If we don't inject these headers
# and set them to empty string here, # and set them to empty string here,
@@ -44,6 +43,6 @@ class Proxy(Application):
# an TypeError due to the None values. # an TypeError due to the None values.
for header in HEADERS: for header in HEADERS:
if not environ.get(header): if not environ.get(header):
environ[header] = '' environ[header] = ""
result = self.proxy_app(environ, start_response) result = self.proxy_app(environ, start_response)
return result return result

View File

@@ -1,4 +1,4 @@
# Copyright 2017 Wind River # Copyright 2017, 2024 Wind River
# #
# Licensed under the Apache License, Version 2.0 (the "License"); # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. # you may not use this file except in compliance with the License.
@@ -33,15 +33,10 @@ CONF = cfg.CONF
class Router(Middleware): class Router(Middleware):
"""WSGI middleware that maps incoming requests to WSGI apps. """WSGI middleware that maps incoming requests to WSGI apps."""
"""
def __init__(self, app, conf, mapper, forwarder): def __init__(self, app, conf, mapper, forwarder):
"""Create a router for the given routes.Mapper."""
"""Create a router for the given routes.Mapper.
"""
self.map = mapper self.map = mapper
self.forwarder = forwarder self.forwarder = forwarder
@@ -50,39 +45,40 @@ class Router(Middleware):
@webob.dec.wsgify(RequestClass=Request) @webob.dec.wsgify(RequestClass=Request)
def __call__(self, req): def __call__(self, req):
"""Route the incoming request to a controller based on self.map."""
"""Route the incoming request to a controller based on self.map.
"""
return self._router return self._router
@webob.dec.wsgify @webob.dec.wsgify
def _dispatch(self, req): def _dispatch(self, req):
"""Called by self._router after matching the incoming request to a """Called by self._router after matching the incoming request to a
route and putting the information into req.environ. route and putting the information into req.environ.
""" """
match = req.environ['wsgiorg.routing_args'][1] match = req.environ["wsgiorg.routing_args"][1]
if not match: if not match:
if self.forwarder: if self.forwarder:
return self.forwarder return self.forwarder
msg = _('The request is not allowed in System Controller') msg = _("The request is not allowed in System Controller")
proxy_utils.cleanup(req.environ) proxy_utils.cleanup(req.environ)
raise webob.exc.HTTPForbidden(explanation=msg) raise webob.exc.HTTPForbidden(explanation=msg)
LOG.debug("Found match action!") LOG.debug("Found match action!")
app = match['controller'] app = match["controller"]
return app return app
@staticmethod @staticmethod
def _add_resource(mapper, controller, paths, tag, endpoint_type, def _add_resource(
action=None, method=None): mapper, controller, paths, tag, endpoint_type, action=None, method=None
):
if action is None: if action is None:
action = tag action = tag
if method is None: if method is None:
method = constants.ROUTE_METHOD_MAP[endpoint_type].get(tag) method = constants.ROUTE_METHOD_MAP[endpoint_type].get(tag)
for path in paths: for path in paths:
mapper.connect(path, controller=controller, action=action, mapper.connect(
conditions=dict(method=method)) path,
controller=controller,
action=action,
conditions=dict(method=method),
)

View File

@@ -17,56 +17,52 @@ from dcorch.common import consts
# Version could be any of the following: /, /v1, /v1/ # Version could be any of the following: /, /v1, /v1/
# but must deny regular paths such as /v1/isystems # but must deny regular paths such as /v1/isystems
VERSION_ROOT = '/{version:[^/]*?(\/$)?}' VERSION_ROOT = "/{version:[^/]*?(\/$)?}"
# Compute # Compute
FLAVOR_RESOURCE_TAG = 'flavors' FLAVOR_RESOURCE_TAG = "flavors"
FLAVOR_ACCESS_RESOURCE_TAG = 'action' FLAVOR_ACCESS_RESOURCE_TAG = "action"
FLAVOR_EXTRA_SPECS_RESOURCE_TAG = 'os-extra_specs' FLAVOR_EXTRA_SPECS_RESOURCE_TAG = "os-extra_specs"
KEYPAIRS_RESOURCE_TAG = 'os-keypairs' KEYPAIRS_RESOURCE_TAG = "os-keypairs"
QUOTA_RESOURCE_TAG = 'os-quota-sets' QUOTA_RESOURCE_TAG = "os-quota-sets"
QUOTA_CLASS_RESOURCE_TAG = 'os-quota-class-sets' QUOTA_CLASS_RESOURCE_TAG = "os-quota-class-sets"
FLAVOR_PATHS = [ FLAVOR_PATHS = [
'/v2.1/{project_id:.*?}/flavors', "/v2.1/{project_id:.*?}/flavors",
'/v2.1/{project_id:.*?}/flavors/{flavor_id}' "/v2.1/{project_id:.*?}/flavors/{flavor_id}",
] ]
FLAVOR_ACCESS_PATHS = [ FLAVOR_ACCESS_PATHS = ["/v2.1/{project_id:.*?}/flavors/{flavor_id}/action"]
'/v2.1/{project_id:.*?}/flavors/{flavor_id}/action'
]
EXTRA_SPECS_PATHS = [ EXTRA_SPECS_PATHS = [
'/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs', "/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs",
'/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs/{extra_spec}' "/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs/{extra_spec}",
] ]
KEYPAIRS_PATHS = [ KEYPAIRS_PATHS = [
'/v2.1/{project_id:.*?}/os-keypairs', "/v2.1/{project_id:.*?}/os-keypairs",
'/v2.1/{project_id:.*?}/os-keypairs/{keypair}' "/v2.1/{project_id:.*?}/os-keypairs/{keypair}",
] ]
QUOTA_PATHS = [ QUOTA_PATHS = [
'/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}', "/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}",
] ]
QUOTA_DETAIL_PATHS = [ QUOTA_DETAIL_PATHS = [
'/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}/detail', "/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}/detail",
] ]
QUOTA_CLASS_PATHS = [ QUOTA_CLASS_PATHS = [
'/v2.1/{project_id:.*?}/os-quota-class-sets/{id}', "/v2.1/{project_id:.*?}/os-quota-class-sets/{id}",
] ]
COMPUTE_PATH_MAP = { COMPUTE_PATH_MAP = {
consts.RESOURCE_TYPE_COMPUTE_FLAVOR: { consts.RESOURCE_TYPE_COMPUTE_FLAVOR: {
FLAVOR_RESOURCE_TAG: FLAVOR_PATHS, FLAVOR_RESOURCE_TAG: FLAVOR_PATHS,
FLAVOR_ACCESS_RESOURCE_TAG: FLAVOR_ACCESS_PATHS, FLAVOR_ACCESS_RESOURCE_TAG: FLAVOR_ACCESS_PATHS,
FLAVOR_EXTRA_SPECS_RESOURCE_TAG: EXTRA_SPECS_PATHS FLAVOR_EXTRA_SPECS_RESOURCE_TAG: EXTRA_SPECS_PATHS,
},
consts.RESOURCE_TYPE_COMPUTE_KEYPAIR: {
KEYPAIRS_RESOURCE_TAG: KEYPAIRS_PATHS
}, },
consts.RESOURCE_TYPE_COMPUTE_KEYPAIR: {KEYPAIRS_RESOURCE_TAG: KEYPAIRS_PATHS},
consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET: { consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET: {
QUOTA_RESOURCE_TAG: QUOTA_PATHS, QUOTA_RESOURCE_TAG: QUOTA_PATHS,
}, },
@@ -76,24 +72,13 @@ COMPUTE_PATH_MAP = {
} }
# Sysinv # Sysinv
CERTIFICATE_PATHS = [ CERTIFICATE_PATHS = ["/v1/certificate/certificate_install", "/v1/certificate/{uuid}"]
'/v1/certificate/certificate_install',
'/v1/certificate/{uuid}'
]
USER_PATHS = [ USER_PATHS = ["/v1/iuser/{uuid}"]
'/v1/iuser/{uuid}'
]
LOAD_PATHS = [ LOAD_PATHS = ["/v1/loads/import_load", "/v1/loads/{id}"]
'/v1/loads/import_load',
'/v1/loads/{id}'
]
DEVICE_IMAGE_PATHS = [ DEVICE_IMAGE_PATHS = ["/v1/device_images", "/v1/device_images/{uuid}"]
'/v1/device_images',
'/v1/device_images/{uuid}'
]
SYSINV_PATH_MAP = { SYSINV_PATH_MAP = {
consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: CERTIFICATE_PATHS, consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: CERTIFICATE_PATHS,
@@ -102,19 +87,19 @@ SYSINV_PATH_MAP = {
consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: DEVICE_IMAGE_PATHS, consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: DEVICE_IMAGE_PATHS,
} }
LOAD_FILES_STAGING_DIR = '/scratch/tmp_load' LOAD_FILES_STAGING_DIR = "/scratch/tmp_load"
IMPORT_LOAD_FILES = ['path_to_iso', 'path_to_sig'] IMPORT_LOAD_FILES = ["path_to_iso", "path_to_sig"]
IMPORTED_LOAD_MAX_COUNT = 1 IMPORTED_LOAD_MAX_COUNT = 1
DEVICE_IMAGE_VAULT_DIR = '/opt/dc-vault/device_images' DEVICE_IMAGE_VAULT_DIR = "/opt/dc-vault/device_images"
# Cinder # Cinder
CINDER_QUOTA_PATHS = [ CINDER_QUOTA_PATHS = [
'/{version}/{admin_project_id}/os-quota-sets/{project_id}', "/{version}/{admin_project_id}/os-quota-sets/{project_id}",
] ]
CINDER_QUOTA_CLASS_PATHS = [ CINDER_QUOTA_CLASS_PATHS = [
'/{version}/{admin_project_id}/os-quota-class-sets/{quota_class_name}', "/{version}/{admin_project_id}/os-quota-class-sets/{quota_class_name}",
] ]
CINDER_PATH_MAP = { CINDER_PATH_MAP = {
@@ -127,152 +112,142 @@ CINDER_PATH_MAP = {
} }
# Neutron # Neutron
NEUTRON_SECURITY_GROUPS_PATHS = [ NEUTRON_SEC_GROUPS_PATHS = [
'/v2.0/security-groups', "/v2.0/security-groups",
'/v2.0/security-groups/{security_group_id}', "/v2.0/security-groups/{security_group_id}",
] ]
NEUTRON_SECURITY_GROUP_RULES_PATHS = [ NEUTRON_SEC_GROUP_RULES_PATHS = [
'/v2.0/security-group-rules', "/v2.0/security-group-rules",
'/v2.0/security-group-rules/{security_group_rule_id}', "/v2.0/security-group-rules/{security_group_rule_id}",
] ]
NEUTRON_QOS_PATHS = [ NEUTRON_QOS_PATHS = [
'/v2.0/qos/policies', "/v2.0/qos/policies",
'/v2.0/wrs-tm/qoses', "/v2.0/wrs-tm/qoses",
'/v2.0/qos/policies/{policy_id}', "/v2.0/qos/policies/{policy_id}",
'/v2.0/wrs-tm/qoses/{policy_id}', "/v2.0/wrs-tm/qoses/{policy_id}",
] ]
NEUTRON_BANDWIDTH_LIMIT_RULES_PATHS = [ NEUTRON_BANDWIDTH_LIMIT_RULES_PATHS = [
'/v2.0/qos/policies/{policy_id}/bandwidth_limit_rules', "/v2.0/qos/policies/{policy_id}/bandwidth_limit_rules",
] ]
NEUTRON_DSCP_MARKING_RULES_PATHS = [ NEUTRON_DSCP_MARKING_RULES_PATHS = [
'/v2.0/qos/policies/{policy_id}/dscp_marking_rules', "/v2.0/qos/policies/{policy_id}/dscp_marking_rules",
] ]
NEUTRON_MINIMUM_BANDWIDTH_RULES_PATHS = [ NEUTRON_MINIMUM_BANDWIDTH_RULES_PATHS = [
'/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules', "/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules",
'/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}', "/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}",
] ]
NEUTRON_QUOTA_PATHS = [ NEUTRON_QUOTA_PATHS = [
'/v2.0/quotas/{project_id}', "/v2.0/quotas/{project_id}",
] ]
NEUTRON_QUOTA_DETAIL_PATHS = [ NEUTRON_QUOTA_DETAIL_PATHS = [
'/v2.0/quotas/{project_id}/details.json', "/v2.0/quotas/{project_id}/details.json",
] ]
NEUTRON_PATH_MAP = { NEUTRON_PATH_MAP = {
consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: NEUTRON_SEC_GROUPS_PATHS,
NEUTRON_SECURITY_GROUPS_PATHS, consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: NEUTRON_SEC_GROUP_RULES_PATHS,
consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: NEUTRON_QUOTA_PATHS,
NEUTRON_SECURITY_GROUP_RULES_PATHS, consts.RESOURCE_TYPE_QOS_POLICY: NEUTRON_QOS_PATHS,
consts.RESOURCE_TYPE_NETWORK_QUOTA_SET:
NEUTRON_QUOTA_PATHS,
consts.RESOURCE_TYPE_QOS_POLICY:
NEUTRON_QOS_PATHS,
} }
# Software # Software
SOFTWARE_ACTION_QUERY = 'query' SOFTWARE_ACTION_QUERY = "query"
SOFTWARE_ACTION_QUERY_DEPENDENCIES = 'query_dependencies' SOFTWARE_ACTION_QUERY_DEPENDENCIES = "query_dependencies"
SOFTWARE_ACTION_COMMIT_PATCH = 'commit-patch' SOFTWARE_ACTION_COMMIT_PATCH = "commit-patch"
SOFTWARE_ACTION_SHOW = 'show' SOFTWARE_ACTION_SHOW = "show"
SOFTWARE_QUERY_PATHS = [ SOFTWARE_QUERY_PATHS = [
'/v1/query', "/v1/query",
'/v1/software/query', "/v1/software/query",
] ]
SOFTWARE_SHOW_PATHS = [ SOFTWARE_SHOW_PATHS = [
'/v1/show/{release_id}', "/v1/show/{release_id}",
'/v1/software/show/{release_id:.*?}', "/v1/software/show/{release_id:.*?}",
] ]
SOFTWARE_COMMIT_PATCH_PATHS = [ SOFTWARE_COMMIT_PATCH_PATHS = [
'/v1/software/commit_dry_run/{release_id:.*?}', "/v1/software/commit_dry_run/{release_id:.*?}",
'/v1/software/commit_patch/{release_id:.*?}', "/v1/software/commit_patch/{release_id:.*?}",
] ]
SOFTWARE_QUERY_DEPENDENCIES_PATHS = [ SOFTWARE_QUERY_DEPENDENCIES_PATHS = [
'/v1/software/query_dependencies/{release_id:.*?}', "/v1/software/query_dependencies/{release_id:.*?}",
] ]
SOFTWARE_PATH_MAP = { SOFTWARE_PATH_MAP = {
SOFTWARE_ACTION_QUERY: SOFTWARE_QUERY_PATHS, SOFTWARE_ACTION_QUERY: SOFTWARE_QUERY_PATHS,
SOFTWARE_ACTION_SHOW: SOFTWARE_SHOW_PATHS, SOFTWARE_ACTION_SHOW: SOFTWARE_SHOW_PATHS,
SOFTWARE_ACTION_COMMIT_PATCH: SOFTWARE_COMMIT_PATCH_PATHS, SOFTWARE_ACTION_COMMIT_PATCH: SOFTWARE_COMMIT_PATCH_PATHS,
SOFTWARE_ACTION_QUERY_DEPENDENCIES: SOFTWARE_QUERY_DEPENDENCIES_PATHS SOFTWARE_ACTION_QUERY_DEPENDENCIES: SOFTWARE_QUERY_DEPENDENCIES_PATHS,
} }
# Patching # Patching
# allow version request # allow version request
PATCH_ACTION_GET_VERSION = 'version' PATCH_ACTION_GET_VERSION = "version"
PATCH_ACTION_UPLOAD = 'upload' PATCH_ACTION_UPLOAD = "upload"
PATCH_ACTION_UPLOAD_DIR = 'upload_dir' PATCH_ACTION_UPLOAD_DIR = "upload_dir"
PATCH_ACTION_APPLY = 'apply' PATCH_ACTION_APPLY = "apply"
PATCH_ACTION_REMOVE = 'remove' PATCH_ACTION_REMOVE = "remove"
PATCH_ACTION_DELETE = 'delete' PATCH_ACTION_DELETE = "delete"
PATCH_ACTION_QUERY = 'query' PATCH_ACTION_QUERY = "query"
PATCH_ACTION_SHOW = 'show' PATCH_ACTION_SHOW = "show"
PATCH_ACTION_COMMIT = 'commit' PATCH_ACTION_COMMIT = "commit"
PATCH_ACTION_WHAT_REQS = 'what_requires' PATCH_ACTION_WHAT_REQS = "what_requires"
PATCH_ACTION_QUERY_DEPS = 'query_dependencies' PATCH_ACTION_QUERY_DEPS = "query_dependencies"
PATCH_API_VERSION = ['/'] PATCH_API_VERSION = ["/"]
PATCH_UPLOAD_PATHS = [ PATCH_UPLOAD_PATHS = [
'/v1/upload', "/v1/upload",
'/patch/upload', "/patch/upload",
] ]
# upload_dir is not supported for REST API access # upload_dir is not supported for REST API access
PATCH_UPLOAD_DIR_PATHS = [ PATCH_UPLOAD_DIR_PATHS = ["/patch/upload_dir"]
'/patch/upload_dir'
]
PATCH_APPLY_PATHS = [ PATCH_APPLY_PATHS = [
'/v1/apply/{patch_id}', "/v1/apply/{patch_id}",
'/patch/apply/{patch_id:.*?}', "/patch/apply/{patch_id:.*?}",
] ]
PATCH_REMOVE_PATHS = [ PATCH_REMOVE_PATHS = [
'/v1/remove/{patch_id}', "/v1/remove/{patch_id}",
'/patch/remove/{patch_id:.*?}', "/patch/remove/{patch_id:.*?}",
] ]
PATCH_DELETE_PATHS = [ PATCH_DELETE_PATHS = [
'/v1/delete/{patch_id}', "/v1/delete/{patch_id}",
'/patch/delete/{patch_id:.*?}', "/patch/delete/{patch_id:.*?}",
] ]
PATCH_QUERY_PATHS = [ PATCH_QUERY_PATHS = [
'/v1/query', "/v1/query",
'/patch/query', "/patch/query",
] ]
PATCH_SHOW_PATHS = [ PATCH_SHOW_PATHS = [
'/v1/show/{patch_id}', "/v1/show/{patch_id}",
'/patch/show/{patch_id:.*?}', "/patch/show/{patch_id:.*?}",
] ]
PATCH_COMMIT_PATHS = [ PATCH_COMMIT_PATHS = [
'/patch/commit_dry_run/{patch_id:.*?}', "/patch/commit_dry_run/{patch_id:.*?}",
'/patch/commit/{patch_id:.*?}', "/patch/commit/{patch_id:.*?}",
] ]
PATCH_WHAT_REQS_PATHS = [ PATCH_WHAT_REQS_PATHS = ["/patch/what_requires/{patch_id:.*?}"]
'/patch/what_requires/{patch_id:.*?}'
]
PATCH_QUERY_DEPS_PATHS = [ PATCH_QUERY_DEPS_PATHS = ["/patch/query_dependencies/{patch_id:.*?}"]
'/patch/query_dependencies/{patch_id:.*?}'
]
PATCH_PATH_MAP = { PATCH_PATH_MAP = {
PATCH_ACTION_GET_VERSION: PATCH_API_VERSION, PATCH_ACTION_GET_VERSION: PATCH_API_VERSION,
@@ -290,37 +265,37 @@ PATCH_PATH_MAP = {
# Identity # Identity
IDENTITY_USERS_PATH = [ IDENTITY_USERS_PATH = [
'/v3/users', "/v3/users",
'/v3/users/{user_id}', "/v3/users/{user_id}",
] ]
IDENTITY_USERS_PW_PATH = [ IDENTITY_USERS_PW_PATH = [
'/v3/users/{user_id}/password', "/v3/users/{user_id}/password",
] ]
IDENTITY_USER_GROUPS_PATH = [ IDENTITY_USER_GROUPS_PATH = [
'/v3/groups', "/v3/groups",
'/v3/groups/{group_id}', "/v3/groups/{group_id}",
'/v3/groups/{group_id}/users/{user_id}', "/v3/groups/{group_id}/users/{user_id}",
] ]
IDENTITY_ROLES_PATH = [ IDENTITY_ROLES_PATH = [
'/v3/roles', "/v3/roles",
'/v3/roles/{role_id}', "/v3/roles/{role_id}",
] ]
IDENTITY_PROJECTS_PATH = [ IDENTITY_PROJECTS_PATH = [
'/v3/projects', "/v3/projects",
'/v3/projects/{project_id}', "/v3/projects/{project_id}",
] ]
IDENTITY_PROJECTS_ROLE_PATH = [ IDENTITY_PROJECTS_ROLE_PATH = [
'/v3/projects/{project_id}/users/{user_id}/roles/{role_id}', "/v3/projects/{project_id}/users/{user_id}/roles/{role_id}",
'/v3/projects/{project_id}/groups/{group_id}/roles/{role_id}', "/v3/projects/{project_id}/groups/{group_id}/roles/{role_id}",
] ]
IDENTITY_TOKEN_REVOKE_EVENTS_PATH = [ IDENTITY_TOKEN_EVENTS_PATH = [
'/v3/auth/tokens', "/v3/auth/tokens",
] ]
IDENTITY_PATH_MAP = { IDENTITY_PATH_MAP = {
@@ -329,74 +304,64 @@ IDENTITY_PATH_MAP = {
consts.RESOURCE_TYPE_IDENTITY_GROUPS: IDENTITY_USER_GROUPS_PATH, consts.RESOURCE_TYPE_IDENTITY_GROUPS: IDENTITY_USER_GROUPS_PATH,
consts.RESOURCE_TYPE_IDENTITY_ROLES: IDENTITY_ROLES_PATH, consts.RESOURCE_TYPE_IDENTITY_ROLES: IDENTITY_ROLES_PATH,
consts.RESOURCE_TYPE_IDENTITY_PROJECTS: IDENTITY_PROJECTS_PATH, consts.RESOURCE_TYPE_IDENTITY_PROJECTS: IDENTITY_PROJECTS_PATH,
consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: IDENTITY_PROJECTS_ROLE_PATH,
IDENTITY_PROJECTS_ROLE_PATH, consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS: IDENTITY_TOKEN_EVENTS_PATH,
consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS:
IDENTITY_TOKEN_REVOKE_EVENTS_PATH,
} }
ROUTE_METHOD_MAP = { ROUTE_METHOD_MAP = {
consts.ENDPOINT_TYPE_COMPUTE: { consts.ENDPOINT_TYPE_COMPUTE: {
FLAVOR_RESOURCE_TAG: ['POST', 'DELETE'], FLAVOR_RESOURCE_TAG: ["POST", "DELETE"],
FLAVOR_ACCESS_RESOURCE_TAG: ['POST'], FLAVOR_ACCESS_RESOURCE_TAG: ["POST"],
FLAVOR_EXTRA_SPECS_RESOURCE_TAG: ['POST', 'PUT', 'DELETE'], FLAVOR_EXTRA_SPECS_RESOURCE_TAG: ["POST", "PUT", "DELETE"],
KEYPAIRS_RESOURCE_TAG: ['POST', 'DELETE'], KEYPAIRS_RESOURCE_TAG: ["POST", "DELETE"],
QUOTA_RESOURCE_TAG: ['PUT', 'DELETE', 'GET'], QUOTA_RESOURCE_TAG: ["PUT", "DELETE", "GET"],
QUOTA_CLASS_RESOURCE_TAG: ['PUT'], QUOTA_CLASS_RESOURCE_TAG: ["PUT"],
}, },
consts.ENDPOINT_TYPE_VOLUME: { consts.ENDPOINT_TYPE_VOLUME: {
QUOTA_RESOURCE_TAG: ['PUT', 'DELETE', 'GET'], QUOTA_RESOURCE_TAG: ["PUT", "DELETE", "GET"],
QUOTA_CLASS_RESOURCE_TAG: ['PUT'], QUOTA_CLASS_RESOURCE_TAG: ["PUT"],
}, },
dccommon_consts.ENDPOINT_TYPE_PLATFORM: { dccommon_consts.ENDPOINT_TYPE_PLATFORM: {
consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: ['POST', 'DELETE'], consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: ["POST", "DELETE"],
consts.RESOURCE_TYPE_SYSINV_USER: ['PATCH', 'PUT'], consts.RESOURCE_TYPE_SYSINV_USER: ["PATCH", "PUT"],
consts.RESOURCE_TYPE_SYSINV_LOAD: ['POST', 'DELETE'], consts.RESOURCE_TYPE_SYSINV_LOAD: ["POST", "DELETE"],
consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: ['POST', 'PATCH', 'DELETE'], consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: ["POST", "PATCH", "DELETE"],
}, },
consts.ENDPOINT_TYPE_NETWORK: { consts.ENDPOINT_TYPE_NETWORK: {
consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: ['POST', 'PUT', 'DELETE'], consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: ["POST", "PUT", "DELETE"],
consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: ['POST', 'DELETE'], consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: ["POST", "DELETE"],
consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: ['PUT', 'DELETE'], consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: ["PUT", "DELETE"],
consts.RESOURCE_TYPE_QOS_POLICY: ['POST', 'PUT', 'DELETE'], consts.RESOURCE_TYPE_QOS_POLICY: ["POST", "PUT", "DELETE"],
}, },
dccommon_consts.ENDPOINT_TYPE_PATCHING: { dccommon_consts.ENDPOINT_TYPE_PATCHING: {
PATCH_ACTION_GET_VERSION: ['GET'], PATCH_ACTION_GET_VERSION: ["GET"],
PATCH_ACTION_UPLOAD: ['POST'], PATCH_ACTION_UPLOAD: ["POST"],
PATCH_ACTION_UPLOAD_DIR: ['POST'], PATCH_ACTION_UPLOAD_DIR: ["POST"],
PATCH_ACTION_APPLY: ['POST'], PATCH_ACTION_APPLY: ["POST"],
PATCH_ACTION_REMOVE: ['POST'], PATCH_ACTION_REMOVE: ["POST"],
PATCH_ACTION_DELETE: ['POST'], PATCH_ACTION_DELETE: ["POST"],
PATCH_ACTION_QUERY: ['GET'], PATCH_ACTION_QUERY: ["GET"],
PATCH_ACTION_SHOW: ['POST', 'GET'], PATCH_ACTION_SHOW: ["POST", "GET"],
PATCH_ACTION_COMMIT: ['POST'], PATCH_ACTION_COMMIT: ["POST"],
PATCH_ACTION_WHAT_REQS: ['GET'], PATCH_ACTION_WHAT_REQS: ["GET"],
PATCH_ACTION_QUERY_DEPS: ['GET'], PATCH_ACTION_QUERY_DEPS: ["GET"],
SOFTWARE_ACTION_QUERY: ['GET'], SOFTWARE_ACTION_QUERY: ["GET"],
SOFTWARE_ACTION_SHOW: ['GET'], SOFTWARE_ACTION_SHOW: ["GET"],
SOFTWARE_ACTION_QUERY_DEPENDENCIES: ['GET'], SOFTWARE_ACTION_QUERY_DEPENDENCIES: ["GET"],
SOFTWARE_ACTION_COMMIT_PATCH: ['POST'], SOFTWARE_ACTION_COMMIT_PATCH: ["POST"],
}, },
dccommon_consts.ENDPOINT_TYPE_IDENTITY: { dccommon_consts.ENDPOINT_TYPE_IDENTITY: {
consts.RESOURCE_TYPE_IDENTITY_USERS: consts.RESOURCE_TYPE_IDENTITY_USERS: ["POST", "PATCH", "DELETE"],
['POST', 'PATCH', 'DELETE'], consts.RESOURCE_TYPE_IDENTITY_GROUPS: ["POST", "PUT", "PATCH", "DELETE"],
consts.RESOURCE_TYPE_IDENTITY_GROUPS: consts.RESOURCE_TYPE_IDENTITY_USERS_PASSWORD: ["POST"],
['POST', 'PUT', 'PATCH', 'DELETE'], consts.RESOURCE_TYPE_IDENTITY_ROLES: ["POST", "PATCH", "DELETE"],
consts.RESOURCE_TYPE_IDENTITY_USERS_PASSWORD: consts.RESOURCE_TYPE_IDENTITY_PROJECTS: ["POST", "PATCH", "DELETE"],
['POST'], consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: ["PUT", "DELETE"],
consts.RESOURCE_TYPE_IDENTITY_ROLES: consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS: ["DELETE"],
['POST', 'PATCH', 'DELETE'], },
consts.RESOURCE_TYPE_IDENTITY_PROJECTS:
['POST', 'PATCH', 'DELETE'],
consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS:
['PUT', 'DELETE'],
consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS:
['DELETE']
}
} }
LOAD_VAULT_DIR = '/opt/dc-vault/loads' LOAD_VAULT_DIR = "/opt/dc-vault/loads"
LOAD_VAULT_TMP_DIR = '/opt/dc-vault/loads/load_tmpdir' LOAD_VAULT_TMP_DIR = "/opt/dc-vault/loads/load_tmpdir"
ENDPOINT_TYPE_PATCHING_TMPDIR = "/scratch/patch-api-proxy-tmpdir" ENDPOINT_TYPE_PATCHING_TMPDIR = "/scratch/patch-api-proxy-tmpdir"
ENDPOINT_TYPE_PLATFORM_TMPDIR = "/scratch/platform-api-proxy-tmpdir" ENDPOINT_TYPE_PLATFORM_TMPDIR = "/scratch/platform-api-proxy-tmpdir"

View File

@@ -25,17 +25,14 @@ class Application(object):
@classmethod @classmethod
def factory(cls, global_config, **local_config): def factory(cls, global_config, **local_config):
"""Used for paste app factories in paste.deploy config files. """Used for paste app factories in paste.deploy config files."""
"""
return cls(**local_config) return cls(**local_config)
def __call__(self, environ, start_response): def __call__(self, environ, start_response):
raise NotImplementedError('You must implement __call__') raise NotImplementedError("You must implement __call__")
class Middleware(Application): class Middleware(Application):
"""Base WSGI middleware wrapper. """Base WSGI middleware wrapper.
These classes require an application to be These classes require an application to be
@@ -46,7 +43,6 @@ class Middleware(Application):
@classmethod @classmethod
def factory(cls, global_config, **local_config): def factory(cls, global_config, **local_config):
"""Used for paste app factories in paste.deploy config files. """Used for paste app factories in paste.deploy config files.
Any local configuration (that is, values under the [filter:APPNAME] Any local configuration (that is, values under the [filter:APPNAME]
@@ -58,13 +54,13 @@ class Middleware(Application):
# https://bugs.launchpad.net/starlingx/+bug/1865085 # https://bugs.launchpad.net/starlingx/+bug/1865085
# pylint: disable-next=too-many-function-args # pylint: disable-next=too-many-function-args
return cls(app, global_config, **local_config) return cls(app, global_config, **local_config)
return _factory return _factory
def __init__(self, application): def __init__(self, application):
self.application = application self.application = application
def process_request(self, req): def process_request(self, req):
"""Called on each request. """Called on each request.
If this returns None, the next application down the stack will be If this returns None, the next application down the stack will be

View File

@@ -24,7 +24,7 @@ import psutil
from dccommon import consts as dccommon_consts from dccommon import consts as dccommon_consts
from dccommon.drivers.openstack.sdk_platform import ( from dccommon.drivers.openstack.sdk_platform import (
OptimizedOpenStackDriver as OpenStackDriver OptimizedOpenStackDriver as OpenStackDriver,
) )
from dcorch.common import consts from dcorch.common import consts
@@ -96,11 +96,11 @@ def get_sync_endpoint(cfg):
def get_url_path_components(url): def get_url_path_components(url):
result = urlparse(url) result = urlparse(url)
return result.path.split('/') return result.path.split("/")
def get_routing_match_arguments(environ): def get_routing_match_arguments(environ):
return environ['wsgiorg.routing_args'][1] return environ["wsgiorg.routing_args"][1]
def get_routing_match_value(environ, key): def get_routing_match_value(environ, key):
@@ -115,37 +115,36 @@ def get_routing_match_value(environ, key):
def get_operation_type(environ): def get_operation_type(environ):
return environ['REQUEST_METHOD'].lower() return environ["REQUEST_METHOD"].lower()
def get_id_from_query_string(environ, id): def get_id_from_query_string(environ, id):
import urllib.parse as six_urlparse import urllib.parse as six_urlparse
params = six_urlparse.parse_qs(environ.get('QUERY_STRING', ''))
params = six_urlparse.parse_qs(environ.get("QUERY_STRING", ""))
return params.get(id, [None])[0] return params.get(id, [None])[0]
def get_user_id(environ): def get_user_id(environ):
return get_id_from_query_string(environ, 'user_id') return get_id_from_query_string(environ, "user_id")
def show_usage(environ): def show_usage(environ):
return get_id_from_query_string(environ, 'usage') == 'True' return get_id_from_query_string(environ, "usage") == "True"
def get_tenant_id(environ): def get_tenant_id(environ):
return get_routing_match_value(environ, 'tenant_id') return get_routing_match_value(environ, "tenant_id")
def set_request_forward_environ(req, remote_host, remote_port): def set_request_forward_environ(req, remote_host, remote_port):
req.environ['HTTP_X_FORWARDED_SERVER'] = req.environ.get( req.environ["HTTP_X_FORWARDED_SERVER"] = req.environ.get("HTTP_HOST", "")
'HTTP_HOST', '') req.environ["HTTP_X_FORWARDED_SCHEME"] = req.environ["wsgi.url_scheme"]
req.environ['HTTP_X_FORWARDED_SCHEME'] = req.environ['wsgi.url_scheme'] req.environ["HTTP_HOST"] = remote_host + ":" + str(remote_port)
req.environ['HTTP_HOST'] = remote_host + ':' + str(remote_port) req.environ["SERVER_NAME"] = remote_host
req.environ['SERVER_NAME'] = remote_host req.environ["SERVER_PORT"] = remote_port
req.environ['SERVER_PORT'] = remote_port if "REMOTE_ADDR" in req.environ and "HTTP_X_FORWARDED_FOR" not in req.environ:
if ('REMOTE_ADDR' in req.environ and 'HTTP_X_FORWARDED_FOR' not in req.environ["HTTP_X_FORWARDED_FOR"] = req.environ["REMOTE_ADDR"]
req.environ):
req.environ['HTTP_X_FORWARDED_FOR'] = req.environ['REMOTE_ADDR']
def _get_fernet_keys(): def _get_fernet_keys():
@@ -157,18 +156,21 @@ def _get_fernet_keys():
) )
try: try:
key_list = os_client.sysinv_client.get_fernet_keys() key_list = os_client.sysinv_client.get_fernet_keys()
return [str(getattr(key, 'key')) for key in key_list] return [str(getattr(key, "key")) for key in key_list]
except (keystone_exceptions.connection.ConnectTimeout, except (
keystone_exceptions.ConnectFailure) as e: keystone_exceptions.connection.ConnectTimeout,
LOG.info("get_fernet_keys: cloud {} is not reachable [{}]" keystone_exceptions.ConnectFailure,
.format(dccommon_consts.CLOUD_0, str(e))) ) as e:
LOG.info(
"get_fernet_keys: cloud {} is not reachable [{}]".format(
dccommon_consts.CLOUD_0, str(e)
)
)
OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0) OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0)
return None return None
except (AttributeError, TypeError) as e: except (AttributeError, TypeError) as e:
LOG.info("get_fernet_keys error {}".format(e)) LOG.info("get_fernet_keys error {}".format(e))
OpenStackDriver.delete_region_clients( OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0, clear_token=True)
dccommon_consts.CLOUD_0, clear_token=True
)
return None return None
except Exception as e: except Exception as e:
LOG.exception(e) LOG.exception(e)
@@ -186,7 +188,7 @@ def _restore_padding(token):
mod_returned = len(token) % 4 mod_returned = len(token) % 4
if mod_returned: if mod_returned:
missing_padding = 4 - mod_returned missing_padding = 4 - mod_returned
token += b'=' * missing_padding token += b"=" * missing_padding
return token return token
@@ -230,8 +232,11 @@ def retrieve_token_audit_id(fernet_token):
unpacked_token = _unpack_token(fernet_token, fernet_keys) unpacked_token = _unpack_token(fernet_token, fernet_keys)
if unpacked_token: if unpacked_token:
audit_id = unpacked_token[-1][0] audit_id = unpacked_token[-1][0]
audit_id = base64.urlsafe_b64encode( audit_id = (
audit_id.encode('utf-8')).rstrip(b'=').decode('utf-8') base64.urlsafe_b64encode(audit_id.encode("utf-8"))
.rstrip(b"=")
.decode("utf-8")
)
return audit_id return audit_id
@@ -243,12 +248,12 @@ def cleanup(environ):
:return: None :return: None
""" """
if 'webob._parsed_post_vars' in environ: if "webob._parsed_post_vars" in environ:
post_vars, body_file = environ['webob._parsed_post_vars'] post_vars, body_file = environ["webob._parsed_post_vars"]
# the content is copied into a BytesIO or temporary file # the content is copied into a BytesIO or temporary file
if not isinstance(body_file, bytes): if not isinstance(body_file, bytes):
body_file.close() body_file.close()
for f in post_vars.keys(): for f in post_vars.keys():
item = post_vars[f] item = post_vars[f]
if hasattr(item, 'file'): if hasattr(item, "file"):
item.file.close() item.file.close()

View File

@@ -22,7 +22,11 @@ modules = [
] ]
# List of modules that are already formatted with black # List of modules that are already formatted with black
formatted_modules = ["dccommon", "dcdbsync"] formatted_modules = [
"dccommon",
"dcdbsync",
"dcorch/api",
]
# Function to run black check # Function to run black check