diff --git a/distributedcloud/dcdbsync/__init__.py b/distributedcloud/dcdbsync/__init__.py index 793684a92..c5e5024e2 100644 --- a/distributedcloud/dcdbsync/__init__.py +++ b/distributedcloud/dcdbsync/__init__.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -20,4 +20,4 @@ import pbr.version -__version__ = pbr.version.VersionInfo('distributedcloud').version_string() +__version__ = pbr.version.VersionInfo("distributedcloud").version_string() diff --git a/distributedcloud/dcdbsync/cmd/api.py b/distributedcloud/dcdbsync/cmd/api.py index e395739c2..e7d4f9145 100644 --- a/distributedcloud/dcdbsync/cmd/api.py +++ b/distributedcloud/dcdbsync/cmd/api.py @@ -26,6 +26,7 @@ import logging as std_logging import sys import eventlet + eventlet.monkey_patch(os=False) # pylint: disable=wrong-import-position @@ -38,11 +39,12 @@ from dcdbsync.api import api_config # noqa: E402 from dcdbsync.api import app # noqa: E402 from dcdbsync.common import config # noqa: E402 from dcdbsync.common import messaging # noqa: E402 + # pylint: enable=wrong-import-position CONF = cfg.CONF config.register_options() -LOG = logging.getLogger('dcdbsync.api') +LOG = logging.getLogger("dcdbsync.api") def main(): @@ -58,8 +60,10 @@ def main(): LOG.warning("Wrong worker number, worker = %(workers)s", workers) workers = 1 - LOG.info("Server on http://%(host)s:%(port)s with %(workers)s", - {'host': host, 'port': port, 'workers': workers}) + LOG.info( + "Server on http://%(host)s:%(port)s with %(workers)s", + {"host": host, "port": port, "workers": workers}, + ) messaging.setup() systemd.notify_once() service = wsgi.Server(CONF, "DCDBsync", application, host, port) @@ -73,5 +77,5 @@ def main(): app.wait() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/distributedcloud/dcdbsync/common/config.py b/distributedcloud/dcdbsync/common/config.py index e7a5f430c..1ccebbc81 100644 --- a/distributedcloud/dcdbsync/common/config.py +++ b/distributedcloud/dcdbsync/common/config.py @@ -11,7 +11,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. 
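Note on the api.py hunk above: eventlet.monkey_patch(os=False) has to run before any module that touches sockets or threads is imported, which is why the pylint wrong-import-position markers bracket the later imports, and a bad worker count is clamped to 1 before the server address is logged. A minimal, self-contained sketch of that startup ordering follows; the port, the trivial WSGI callable and the bare eventlet server are placeholders for the real dcdbsync application and oslo.service launcher.

import eventlet

eventlet.monkey_patch(os=False)  # patch the stdlib before socket/thread users are imported

# pylint: disable=wrong-import-position
import logging  # noqa: E402

from eventlet import wsgi  # noqa: E402

# pylint: enable=wrong-import-position

LOG = logging.getLogger("dcdbsync.api")


def application(environ, start_response):
    # Stand-in for the real pecan application built in dcdbsync.api.app
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok"]


def main(host="0.0.0.0", port=8080, workers=0):
    logging.basicConfig(level=logging.INFO)
    if workers < 1:
        # Mirror the guard in api.py: never run with an invalid worker count
        LOG.warning("Wrong worker number, worker = %(workers)s", {"workers": workers})
        workers = 1
    LOG.info(
        "Server on http://%(host)s:%(port)s with %(workers)s",
        {"host": host, "port": port, "workers": workers},
    )
    # The real service hands `workers` to oslo_service; here it is only logged.
    wsgi.server(eventlet.listen((host, port)), application)


if __name__ == "__main__":
    main()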
# # SPDX-License-Identifier: Apache-2.0 # @@ -23,94 +23,88 @@ from oslo_config import cfg from oslo_utils import importutils # Ensure keystonemiddleware options are imported -importutils.import_module('keystonemiddleware.auth_token') +importutils.import_module("keystonemiddleware.auth_token") # OpenStack credentials used for Endpoint Cache # We need to register the below non-standard config # options to dbsync engine keystone_opts = [ - cfg.StrOpt('username', - help='Username of account'), - cfg.StrOpt('password', - help='Password of account'), - cfg.StrOpt('project_name', - help='Tenant name of account'), - cfg.StrOpt('user_domain_name', - default='Default', - help='User domain name of account'), - cfg.StrOpt('project_domain_name', - default='Default', - help='Project domain name of account'), + cfg.StrOpt("username", help="Username of account"), + cfg.StrOpt("password", help="Password of account"), + cfg.StrOpt("project_name", help="Tenant name of account"), + cfg.StrOpt( + "user_domain_name", default="Default", help="User domain name of account" + ), + cfg.StrOpt( + "project_domain_name", default="Default", help="Project domain name of account" + ), ] # Pecan_opts pecan_opts = [ cfg.StrOpt( - 'root', - default='dcdbsync.api.controllers.root.RootController', - help='Pecan root controller' + "root", + default="dcdbsync.api.controllers.root.RootController", + help="Pecan root controller", ), cfg.ListOpt( - 'modules', + "modules", default=["dcdbsync.api"], - help='A list of modules where pecan will search for applications.' + help="A list of modules where pecan will search for applications.", ), cfg.BoolOpt( - 'debug', + "debug", default=False, - help='Enables the ability to display tracebacks in the browser and' - 'interactively debug during development.' + help=( + "Enables the ability to display tracebacks in the browser and " + "interactively debug during development." + ), ), cfg.BoolOpt( - 'auth_enable', - default=True, - help='Enables user authentication in pecan.' - ) + "auth_enable", default=True, help="Enables user authentication in pecan."
+ ), ] # OpenStack credentials used for Endpoint Cache cache_opts = [ - cfg.StrOpt('auth_uri', - help='Keystone authorization url'), - cfg.StrOpt('identity_uri', - help='Keystone service url'), - cfg.StrOpt('admin_username', - help='Username of admin account, needed when' - ' auto_refresh_endpoint set to True'), - cfg.StrOpt('admin_password', - help='Password of admin account, needed when' - ' auto_refresh_endpoint set to True'), - cfg.StrOpt('admin_tenant', - help='Tenant name of admin account, needed when' - ' auto_refresh_endpoint set to True'), - cfg.StrOpt('admin_user_domain_name', - default='Default', - help='User domain name of admin account, needed when' - ' auto_refresh_endpoint set to True'), - cfg.StrOpt('admin_project_domain_name', - default='Default', - help='Project domain name of admin account, needed when' - ' auto_refresh_endpoint set to True') + cfg.StrOpt("auth_uri", help="Keystone authorization url"), + cfg.StrOpt("identity_uri", help="Keystone service url"), + cfg.StrOpt( + "admin_username", + help="Username of admin account, needed when auto_refresh_endpoint set to True", + ), + cfg.StrOpt( + "admin_password", + help="Password of admin account, needed when auto_refresh_endpoint set to True", + ), + cfg.StrOpt( + "admin_tenant", + help="Tenant of admin account, needed when auto_refresh_endpoint set to True", + ), + cfg.StrOpt( + "admin_user_domain_name", + default="Default", + help="User domain of admin, needed when auto_refresh_endpoint set to True", + ), + cfg.StrOpt( + "admin_project_domain_name", + default="Default", + help="Project domain of admin, needed when auto_refresh_endpoint set to True", + ), ] common_opts = [ - cfg.IntOpt('workers', default=1, - help='number of workers'), - cfg.StrOpt('host', - default='localhost', - help='hostname of the machine') + cfg.IntOpt("workers", default=1, help="number of workers"), + cfg.StrOpt("host", default="localhost", help="hostname of the machine"), ] -keystone_opt_group = cfg.OptGroup(name='keystone_authtoken', - title='Keystone options') +keystone_opt_group = cfg.OptGroup(name="keystone_authtoken", title="Keystone options") # The group stores the pecan configurations. -pecan_group = cfg.OptGroup(name='pecan', - title='Pecan options') +pecan_group = cfg.OptGroup(name="pecan", title="Pecan options") -cache_opt_group = cfg.OptGroup(name='cache', - title='OpenStack Credentials') +cache_opt_group = cfg.OptGroup(name="cache", title="OpenStack Credentials") def list_opts(): diff --git a/distributedcloud/dcdbsync/common/context.py b/distributedcloud/dcdbsync/common/context.py index 532147022..83e81c78d 100644 --- a/distributedcloud/dcdbsync/common/context.py +++ b/distributedcloud/dcdbsync/common/context.py @@ -10,22 +10,21 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019-2022 Wind River Systems, Inc. +# Copyright (c) 2019-2022, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # -import pecan -from pecan import hooks - from oslo_context import context as base_context from oslo_utils import encodeutils +import pecan +from pecan import hooks from dcdbsync.api.policies import base as base_policy from dcdbsync.api import policy from dcdbsync.db.identity import api as db_api -ALLOWED_WITHOUT_AUTH = '/' +ALLOWED_WITHOUT_AUTH = "/" class RequestContext(base_context.RequestContext): @@ -35,24 +34,47 @@ class RequestContext(base_context.RequestContext): the system, as well as additional request information. 
""" - def __init__(self, auth_token=None, user=None, project=None, - domain=None, user_domain=None, project_domain=None, - is_admin=None, read_only=False, show_deleted=False, - request_id=None, auth_url=None, trusts=None, - user_name=None, project_name=None, domain_name=None, - user_domain_name=None, project_domain_name=None, - auth_token_info=None, region_name=None, roles=None, - password=None, **kwargs): + def __init__( + self, + auth_token=None, + user=None, + project=None, + domain=None, + user_domain=None, + project_domain=None, + is_admin=None, + read_only=False, + show_deleted=False, + request_id=None, + auth_url=None, + trusts=None, + user_name=None, + project_name=None, + domain_name=None, + user_domain_name=None, + project_domain_name=None, + auth_token_info=None, + region_name=None, + roles=None, + password=None, + **kwargs + ): # Initializer of request context. # We still have 'tenant' param because oslo_context still use it. # pylint: disable=E1123 super(RequestContext, self).__init__( - auth_token=auth_token, user=user, tenant=project, - domain=domain, user_domain=user_domain, - project_domain=project_domain, roles=roles, - read_only=read_only, show_deleted=show_deleted, - request_id=request_id) + auth_token=auth_token, + user=user, + tenant=project, + domain=domain, + user_domain=user_domain, + project_domain=project_domain, + roles=roles, + read_only=read_only, + show_deleted=show_deleted, + request_id=request_id, + ) # request_id might be a byte array self.request_id = encodeutils.safe_decode(self.request_id) @@ -80,8 +102,8 @@ class RequestContext(base_context.RequestContext): # Check user is admin or not if is_admin is None: self.is_admin = policy.authorize( - base_policy.ADMIN_IN_SYSTEM_PROJECTS, {}, self.to_dict(), - do_raise=False) + base_policy.ADMIN_IN_SYSTEM_PROJECTS, {}, self.to_dict(), do_raise=False + ) else: self.is_admin = is_admin @@ -93,26 +115,26 @@ class RequestContext(base_context.RequestContext): def to_dict(self): return { - 'auth_url': self.auth_url, - 'auth_token': self.auth_token, - 'auth_token_info': self.auth_token_info, - 'user': self.user, - 'user_name': self.user_name, - 'user_domain': self.user_domain, - 'user_domain_name': self.user_domain_name, - 'project': self.project, - 'project_name': self.project_name, - 'project_domain': self.project_domain, - 'project_domain_name': self.project_domain_name, - 'domain': self.domain, - 'domain_name': self.domain_name, - 'trusts': self.trusts, - 'region_name': self.region_name, - 'roles': self.roles, - 'show_deleted': self.show_deleted, - 'is_admin': self.is_admin, - 'request_id': self.request_id, - 'password': self.password, + "auth_url": self.auth_url, + "auth_token": self.auth_token, + "auth_token_info": self.auth_token_info, + "user": self.user, + "user_name": self.user_name, + "user_domain": self.user_domain, + "user_domain_name": self.user_domain_name, + "project": self.project, + "project_name": self.project_name, + "project_domain": self.project_domain, + "project_domain_name": self.project_domain_name, + "domain": self.domain, + "domain_name": self.domain_name, + "trusts": self.trusts, + "region_name": self.region_name, + "roles": self.roles, + "show_deleted": self.show_deleted, + "is_admin": self.is_admin, + "request_id": self.request_id, + "password": self.password, } @classmethod @@ -135,14 +157,13 @@ class AuthHook(hooks.PecanHook): if state.request.path == ALLOWED_WITHOUT_AUTH: return req = state.request - identity_status = req.headers.get('X-Identity-Status') - service_identity_status = 
req.headers.get('X-Service-Identity-Status') - if (identity_status == 'Confirmed' or - service_identity_status == 'Confirmed'): + identity_status = req.headers.get("X-Identity-Status") + service_identity_status = req.headers.get("X-Service-Identity-Status") + if identity_status == "Confirmed" or service_identity_status == "Confirmed": return - if req.headers.get('X-Auth-Token'): - msg = 'Auth token is invalid: %s' % req.headers['X-Auth-Token'] + if req.headers.get("X-Auth-Token"): + msg = "Auth token is invalid: %s" % req.headers["X-Auth-Token"] else: - msg = 'Authentication required' + msg = "Authentication required" msg = "Failed to validate access token: %s" % str(msg) pecan.abort(status_code=401, detail=msg) diff --git a/distributedcloud/dcdbsync/common/exceptions.py b/distributedcloud/dcdbsync/common/exceptions.py index 680d4ea63..d691dc32f 100644 --- a/distributedcloud/dcdbsync/common/exceptions.py +++ b/distributedcloud/dcdbsync/common/exceptions.py @@ -86,10 +86,11 @@ class RoleNotFound(NotFound): class ProjectRoleAssignmentNotFound(NotFound): - message = _("Project role assignment with id" - " %(project_role_assignment_id)s doesn't exist.") + message = _( + "Project role assignment with id " + "%(project_role_assignment_id)s doesn't exist." + ) class RevokeEventNotFound(NotFound): - message = _("Token revocation event with id %(revoke_event_id)s" - " doesn't exist.") + message = _("Token revocation event with id %(revoke_event_id)s doesn't exist.") diff --git a/distributedcloud/dcdbsync/common/i18n.py b/distributedcloud/dcdbsync/common/i18n.py index 3d5a2b9e0..3f86697a8 100644 --- a/distributedcloud/dcdbsync/common/i18n.py +++ b/distributedcloud/dcdbsync/common/i18n.py @@ -12,14 +12,14 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # import oslo_i18n -_translators = oslo_i18n.TranslatorFactory(domain='dbsync') +_translators = oslo_i18n.TranslatorFactory(domain="dbsync") # The primary translation function using the well-known name "_" _ = _translators.primary diff --git a/distributedcloud/dcdbsync/common/messaging.py b/distributedcloud/dcdbsync/common/messaging.py index bbf039a93..fffee6fe5 100644 --- a/distributedcloud/dcdbsync/common/messaging.py +++ b/distributedcloud/dcdbsync/common/messaging.py @@ -10,13 +10,12 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. 
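The AuthHook hunk above only re-wraps the header checks, but the control flow is worth seeing un-diffed: trust keystonemiddleware's X-Identity-Status / X-Service-Identity-Status verdict, let the bare root path through, and abort everything else with a 401. A compact sketch of that pattern; the class name is illustrative and the root path is hard-coded.

import pecan
from pecan import hooks


class SimpleAuthHook(hooks.PecanHook):
    """Reject requests that keystonemiddleware did not confirm."""

    def before(self, state):
        req = state.request
        if req.path == "/":  # the root document is allowed without a token
            return
        if "Confirmed" in (
            req.headers.get("X-Identity-Status"),
            req.headers.get("X-Service-Identity-Status"),
        ):
            return
        # Distinguish a rejected token from a missing one, as the hunk does.
        if req.headers.get("X-Auth-Token"):
            msg = "Auth token is invalid: %s" % req.headers["X-Auth-Token"]
        else:
            msg = "Authentication required"
        pecan.abort(status_code=401, detail="Failed to validate access token: %s" % msg)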
# # SPDX-License-Identifier: Apache-2.0 # import eventlet - from oslo_config import cfg import oslo_messaging from oslo_serialization import jsonutils @@ -66,11 +65,12 @@ def setup(url=None, optional=False): eventlet.monkey_patch(time=True) if not TRANSPORT: - oslo_messaging.set_transport_defaults('dcdbsync') - exmods = ['dcdbsync.common.exception'] + oslo_messaging.set_transport_defaults("dcdbsync") + exmods = ["dcdbsync.common.exception"] try: TRANSPORT = oslo_messaging.get_transport( - cfg.CONF, url, allowed_remote_exmods=exmods) + cfg.CONF, url, allowed_remote_exmods=exmods + ) except oslo_messaging.InvalidTransportURL as e: TRANSPORT = None if not optional or e.url: @@ -92,17 +92,16 @@ def cleanup(): def get_rpc_server(target, endpoint): """Return a configured oslo_messaging rpc server.""" serializer = RequestContextSerializer(JsonPayloadSerializer()) - return oslo_messaging.get_rpc_server(TRANSPORT, target, [endpoint], - executor='eventlet', - serializer=serializer) + return oslo_messaging.get_rpc_server( + TRANSPORT, target, [endpoint], executor="eventlet", serializer=serializer + ) def get_rpc_client(**kwargs): """Return a configured oslo_messaging RPCClient.""" target = oslo_messaging.Target(**kwargs) serializer = RequestContextSerializer(JsonPayloadSerializer()) - return oslo_messaging.RPCClient(TRANSPORT, target, - serializer=serializer) + return oslo_messaging.RPCClient(TRANSPORT, target, serializer=serializer) def get_notifier(publisher_id): diff --git a/distributedcloud/dcdbsync/common/version.py b/distributedcloud/dcdbsync/common/version.py index 5c64c4077..72f0d992d 100644 --- a/distributedcloud/dcdbsync/common/version.py +++ b/distributedcloud/dcdbsync/common/version.py @@ -12,7 +12,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -23,7 +23,7 @@ DBSYNC_VENDOR = "Wind River Systems" DBSYNC_PRODUCT = "Distributed Cloud DBsync Agent" DBSYNC_PACKAGE = None # OS distro package version suffix -version_info = pbr.version.VersionInfo('distributedcloud') +version_info = pbr.version.VersionInfo("distributedcloud") version_string = version_info.version_string diff --git a/distributedcloud/dcdbsync/db/identity/api.py b/distributedcloud/dcdbsync/db/identity/api.py index 569a0865d..e836fad60 100644 --- a/distributedcloud/dcdbsync/db/identity/api.py +++ b/distributedcloud/dcdbsync/db/identity/api.py @@ -13,7 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019-2021 Wind River Systems, Inc. +# Copyright (c) 2019-2021, 2024 Wind River Systems, Inc. 
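The messaging.py hunk above is pure re-wrapping, but the oslo.messaging wiring it touches is easier to read flattened out. A hedged sketch of the same calls with placeholder topic and server names and without the request-context serializer; executor="eventlet" is kept only because the module above uses it.

from oslo_config import cfg
import oslo_messaging


class EchoEndpoint(object):
    """Trivial RPC endpoint used only for the sketch."""

    def echo(self, ctxt, msg):
        return msg


def build_rpc_pair(conf=cfg.CONF, url=None):
    oslo_messaging.set_transport_defaults("dcdbsync")
    transport = oslo_messaging.get_transport(
        conf, url, allowed_remote_exmods=["dcdbsync.common.exception"]
    )

    target = oslo_messaging.Target(topic="example-topic", server="example-host")
    server = oslo_messaging.get_rpc_server(
        transport, target, [EchoEndpoint()], executor="eventlet"
    )

    client = oslo_messaging.RPCClient(
        transport, oslo_messaging.Target(topic="example-topic")
    )
    return server, client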
# # SPDX-License-Identifier: Apache-2.0 # @@ -30,7 +30,7 @@ from oslo_db import api CONF = cfg.CONF -_BACKEND_MAPPING = {'sqlalchemy': 'dcdbsync.db.identity.sqlalchemy.api'} +_BACKEND_MAPPING = {"sqlalchemy": "dcdbsync.db.identity.sqlalchemy.api"} IMPL = api.DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING) @@ -49,6 +49,7 @@ def get_session(): ################### + def user_get_all(context): """Retrieve all users.""" return IMPL.user_get_all(context) @@ -75,6 +76,7 @@ def user_update(context, user_ref, payload): ################### + def group_get_all(context): """Retrieve all groups.""" return IMPL.group_get_all(context) @@ -101,6 +103,7 @@ def group_update(context, group_ref, payload): ################### + def project_get_all(context): """Retrieve all projects.""" return IMPL.project_get_all(context) @@ -127,6 +130,7 @@ def project_update(context, project_ref, payload): ################### + def role_get_all(context): """Retrieve all roles.""" return IMPL.role_get_all(context) @@ -153,6 +157,7 @@ def role_update(context, role_ref, payload): ################### + def revoke_event_get_all(context): """Retrieve all token revocation events.""" return IMPL.revoke_event_get_all(context) diff --git a/distributedcloud/dcdbsync/db/identity/sqlalchemy/api.py b/distributedcloud/dcdbsync/db/identity/sqlalchemy/api.py index 6c0f08211..d69d24c36 100644 --- a/distributedcloud/dcdbsync/db/identity/sqlalchemy/api.py +++ b/distributedcloud/dcdbsync/db/identity/sqlalchemy/api.py @@ -53,6 +53,7 @@ def _get_context(): global _CONTEXT if _CONTEXT is None: import threading + _CONTEXT = threading.local() return _CONTEXT @@ -65,11 +66,7 @@ class TableRegistry(object): try: table = self.metadata.tables[tablename] except KeyError: - table = Table( - tablename, - self.metadata, - autoload_with=connection - ) + table = Table(tablename, self.metadata, autoload_with=connection) return table @@ -155,9 +152,8 @@ def get_backend(): def is_admin_context(context): """Indicate if the request context is an administrator.""" if not context: - LOG.warning(_('Use of empty request context is deprecated'), - DeprecationWarning) - raise Exception('die') + LOG.warning(_("Use of empty request context is deprecated"), DeprecationWarning) + raise Exception("die") return context.is_admin @@ -177,6 +173,7 @@ def require_admin_context(f): The first argument to the wrapped function must be the context. """ + def wrapper(*args, **kwargs): if not is_admin_context(args[0]): raise exception.AdminRequired() @@ -194,6 +191,7 @@ def require_context(f): The first argument to the wrapped function must be the context. 
""" + def wrapper(*args, **kwargs): if not is_admin_context(args[0]) and not is_user_context(args[0]): raise exception.NotAuthorized() @@ -208,27 +206,33 @@ def require_context(f): ################### + @require_context def user_get_all(context): result = [] with get_read_connection() as conn: # user table - users = query(conn, 'user') + users = query(conn, "user") # local_user table - local_users = query(conn, 'local_user') + local_users = query(conn, "local_user") # password table - passwords = query(conn, 'password') + passwords = query(conn, "password") for local_user in local_users: - user = {'user': user for user in users if user['id'] - == local_user['user_id']} - user_passwords = {'password': [password for password in passwords - if password['local_user_id'] == - local_user['id']]} - user_consolidated = dict(list({'local_user': local_user}.items()) + - list(user.items()) + - list(user_passwords.items())) + user = {"user": user for user in users if user["id"] == local_user["user_id"]} + user_passwords = { + "password": [ + password + for password in passwords + if password["local_user_id"] == local_user["id"] + ] + } + user_consolidated = dict( + list({"local_user": local_user}.items()) + + list(user.items()) + + list(user_passwords.items()) + ) result.append(user_consolidated) return result @@ -240,113 +244,111 @@ def user_get(context, user_id): with get_read_connection() as conn: # user table - users = query(conn, 'user', 'id', user_id) + users = query(conn, "user", "id", user_id) if not users: raise exception.UserNotFound(user_id=user_id) - result['user'] = users[0] + result["user"] = users[0] # local_user table - local_users = query(conn, 'local_user', 'user_id', user_id) + local_users = query(conn, "local_user", "user_id", user_id) if not local_users: raise exception.UserNotFound(user_id=user_id) - result['local_user'] = local_users[0] + result["local_user"] = local_users[0] # password table - result['password'] = [] - if result['local_user']: - result['password'] = query(conn, 'password', - 'local_user_id', - result['local_user'].get('id')) + result["password"] = [] + if result["local_user"]: + result["password"] = query( + conn, "password", "local_user_id", result["local_user"].get("id") + ) return result @require_admin_context def user_create(context, payload): - users = [payload['user']] - local_users = [payload['local_user']] - passwords = payload['password'] + users = [payload["user"]] + local_users = [payload["local_user"]] + passwords = payload["password"] with get_write_connection() as conn: - insert(conn, 'user', users) + insert(conn, "user", users) # ignore auto generated id for local_user in local_users: - local_user.pop('id', None) - insert(conn, 'local_user', local_users) + local_user.pop("id", None) + insert(conn, "local_user", local_users) - inserted_local_users = query(conn, 'local_user', 'user_id', - payload['local_user']['user_id']) + inserted_local_users = query( + conn, "local_user", "user_id", payload["local_user"]["user_id"] + ) if not inserted_local_users: - raise exception.UserNotFound(user_id=payload['local_user'] - ['user_id']) + raise exception.UserNotFound(user_id=payload["local_user"]["user_id"]) for password in passwords: # ignore auto generated id - password.pop('id', None) - password['local_user_id'] = inserted_local_users[0]['id'] + password.pop("id", None) + password["local_user_id"] = inserted_local_users[0]["id"] - insert(conn, 'password', passwords) + insert(conn, "password", passwords) - return user_get(context, payload['user']['id']) + 
return user_get(context, payload["user"]["id"]) @require_admin_context def user_update(context, user_id, payload): with get_write_connection() as conn: # user table - table = 'user' + table = "user" new_user_id = user_id if table in payload: user_options = [] user = payload[table] - new_user_id = user.get('id') + new_user_id = user.get("id") if user_id != new_user_id: # Delete the user_option record referencing to the old user_id # to avoid the foreign key constraint violation when we update # the user table in the next step. - user_options = query(conn, 'user_option', 'user_id', user_id) - delete(conn, 'user_option', 'user_id', user_id) + user_options = query(conn, "user_option", "user_id", user_id) + delete(conn, "user_option", "user_id", user_id) else: - user.pop('id', None) - update(conn, table, 'id', user_id, user) + user.pop("id", None) + update(conn, table, "id", user_id, user) if user_options: for user_option in user_options: - user_option['user_id'] = new_user_id - insert(conn, 'user_option', user_option) + user_option["user_id"] = new_user_id + insert(conn, "user_option", user_option) # local_user table - table = 'local_user' + table = "local_user" if table in payload: local_user = payload[table] # ignore auto generated id - local_user.pop('id', None) - update(conn, table, 'user_id', user_id, local_user) - updated_local_users = query(conn, table, 'user_id', - new_user_id) + local_user.pop("id", None) + update(conn, table, "user_id", user_id, local_user) + updated_local_users = query(conn, table, "user_id", new_user_id) if not updated_local_users: - raise exception.UserNotFound(user_id=payload[table]['user_id']) + raise exception.UserNotFound(user_id=payload[table]["user_id"]) # password table - table = 'password' + table = "password" if table in payload: - delete(conn, table, 'local_user_id', - updated_local_users[0]['id']) + delete(conn, table, "local_user_id", updated_local_users[0]["id"]) passwords = payload[table] for password in passwords: # ignore auto generated ids - password.pop('id', None) - password['local_user_id'] = \ - updated_local_users[0]['id'] + password.pop("id", None) + password["local_user_id"] = updated_local_users[0]["id"] insert(conn, table, password) # Need to update the actor_id in assignment and system_assignment # along with the user_id in user_group_membership tables if the # user id is updated if user_id != new_user_id: - assignment = {'actor_id': new_user_id} - user_group_membership = {'user_id': new_user_id} - update(conn, 'assignment', 'actor_id', user_id, assignment) - update(conn, 'system_assignment', 'actor_id', user_id, assignment) - update(conn, 'user_group_membership', 'user_id', - user_id, user_group_membership) + assignment = {"actor_id": new_user_id} + user_group_membership = {"user_id": new_user_id} + update(conn, "assignment", "actor_id", user_id, assignment) + update(conn, "system_assignment", "actor_id", user_id, assignment) + update( + conn, "user_group_membership", "user_id", user_id, user_group_membership + ) return user_get(context, new_user_id) @@ -357,24 +359,28 @@ def user_update(context, user_id, payload): ################### + @require_context def group_get_all(context): result = [] with get_read_connection() as conn: # groups table - groups = query(conn, 'group') + groups = query(conn, "group") # user_group_membership table - user_group_memberships = query(conn, 'user_group_membership') + user_group_memberships = query(conn, "user_group_membership") for group in groups: - local_user_id_list = [membership['user_id'] for 
membership - in user_group_memberships if - membership['group_id'] == group['id']] + local_user_id_list = [ + membership["user_id"] + for membership in user_group_memberships + if membership["group_id"] == group["id"] + ] local_user_id_list.sort() - local_user_ids = {'local_user_ids': local_user_id_list} - group_consolidated = dict(list({'group': group}.items()) + - list(local_user_ids.items())) + local_user_ids = {"local_user_ids": local_user_id_list} + group_consolidated = dict( + list({"group": group}.items()) + list(local_user_ids.items()) + ) result.append(group_consolidated) return result @@ -388,59 +394,62 @@ def group_get(context, group_id): local_user_id_list = [] # group table - group = query(conn, 'group', 'id', group_id) + group = query(conn, "group", "id", group_id) if not group: raise exception.GroupNotFound(group_id=group_id) - result['group'] = group[0] + result["group"] = group[0] # user_group_membership table - user_group_memberships = query(conn, 'user_group_membership', - 'group_id', group_id) + user_group_memberships = query( + conn, "user_group_membership", "group_id", group_id + ) for user_group_membership in user_group_memberships: - local_user = query(conn, 'local_user', 'user_id', - user_group_membership.get('user_id')) + local_user = query( + conn, "local_user", "user_id", user_group_membership.get("user_id") + ) if not local_user: - raise exception.UserNotFound(user_id=user_group_membership.get( - 'user_id')) - local_user_id_list.append(local_user[0]['user_id']) + raise exception.UserNotFound( + user_id=user_group_membership.get("user_id") + ) + local_user_id_list.append(local_user[0]["user_id"]) - result['local_user_ids'] = local_user_id_list + result["local_user_ids"] = local_user_id_list return result @require_admin_context def group_create(context, payload): - group = payload['group'] - local_user_ids = payload['local_user_ids'] + group = payload["group"] + local_user_ids = payload["local_user_ids"] with get_write_connection() as conn: - insert(conn, 'group', group) + insert(conn, "group", group) for local_user_id in local_user_ids: - user_group_membership = {'user_id': local_user_id, - 'group_id': group['id']} - insert(conn, 'user_group_membership', user_group_membership) + user_group_membership = {"user_id": local_user_id, "group_id": group["id"]} + insert(conn, "user_group_membership", user_group_membership) - return group_get(context, payload['group']['id']) + return group_get(context, payload["group"]["id"]) @require_admin_context def group_update(context, group_id, payload): with get_write_connection() as conn: new_group_id = group_id - if 'group' in payload and 'local_user_ids' in payload: - group = payload['group'] - new_group_id = group.get('id') + if "group" in payload and "local_user_ids" in payload: + group = payload["group"] + new_group_id = group.get("id") # local_user_id_list is a sorted list of user IDs that # belong to this group - local_user_id_list = payload['local_user_ids'] - user_group_memberships = query(conn, 'user_group_membership', - 'group_id', group_id) + local_user_id_list = payload["local_user_ids"] + user_group_memberships = query( + conn, "user_group_membership", "group_id", group_id + ) existing_user_list = [ - user_group_membership['user_id'] for user_group_membership in - user_group_memberships + user_group_membership["user_id"] + for user_group_membership in user_group_memberships ] existing_user_list.sort() deleted = False @@ -449,24 +458,23 @@ def group_update(context, group_id, payload): # before updating group if 
groups IDs are different. # Alternatively, if there is a discrepency in the user group memberships, # delete and re-create them - if (group_id != new_group_id) or ( - local_user_id_list != existing_user_list): - delete(conn, 'user_group_membership', 'group_id', group_id) + if (group_id != new_group_id) or (local_user_id_list != existing_user_list): + delete(conn, "user_group_membership", "group_id", group_id) deleted = True # Update group table - update(conn, 'group', 'id', group_id, group) + update(conn, "group", "id", group_id, group) if deleted: for local_user_id in local_user_id_list: - item = {'user_id': local_user_id, 'group_id': new_group_id} - insert(conn, 'user_group_membership', item) + item = {"user_id": local_user_id, "group_id": new_group_id} + insert(conn, "user_group_membership", item) # Need to update the actor_id in assignment and system_assignment # tables if the group id is updated if group_id != new_group_id: - assignment = {'actor_id': new_group_id} - update(conn, 'assignment', 'actor_id', group_id, assignment) - update(conn, 'system_assignment', 'actor_id', group_id, assignment) + assignment = {"actor_id": new_group_id} + update(conn, "assignment", "actor_id", group_id, assignment) + update(conn, "system_assignment", "actor_id", group_id, assignment) return group_get(context, new_group_id) @@ -477,16 +485,17 @@ def group_update(context, group_id, payload): ################### + @require_context def project_get_all(context): result = [] with get_read_connection() as conn: # project table - projects = query(conn, 'project') + projects = query(conn, "project") for project in projects: - project_consolidated = {'project': project} + project_consolidated = {"project": project} result.append(project_consolidated) return result @@ -498,29 +507,29 @@ def project_get(context, project_id): with get_read_connection() as conn: # project table - projects = query(conn, 'project', 'id', project_id) + projects = query(conn, "project", "id", project_id) if not projects: raise exception.ProjectNotFound(project_id=project_id) - result['project'] = projects[0] + result["project"] = projects[0] return result @require_admin_context def project_create(context, payload): - projects = [payload['project']] + projects = [payload["project"]] with get_write_connection() as conn: - insert(conn, 'project', projects) + insert(conn, "project", projects) - return project_get(context, payload['project']['id']) + return project_get(context, payload["project"]["id"]) @require_admin_context def project_update(context, project_id, payload): with get_write_connection() as conn: # project table - table = 'project' + table = "project" new_project_id = project_id if table in payload: domain_ref_projects = [] @@ -528,53 +537,51 @@ def project_update(context, project_id, payload): domain_ref_users = [] domain_ref_local_users = [] project = payload[table] - new_project_id = project.get('id') + new_project_id = project.get("id") if project_id != new_project_id: - domain_ref_projects = query(conn, 'project', 'domain_id', - project_id) - delete(conn, 'project', 'domain_id', project_id) - parent_ref_projects = query(conn, 'project', 'parent_id', - project_id) - delete(conn, 'project', 'parent_id', project_id) + domain_ref_projects = query(conn, "project", "domain_id", project_id) + delete(conn, "project", "domain_id", project_id) + parent_ref_projects = query(conn, "project", "parent_id", project_id) + delete(conn, "project", "parent_id", project_id) # For user table: CONSTRAINT `user_ibfk_1` # FOREIGN 
KEY(`domain_id`) REFERENCES `project`(`id`) - domain_ref_users = query(conn, 'user', 'domain_id', - project_id) - domain_ref_local_users = query(conn, 'local_user', - 'domain_id', project_id) - delete(conn, 'user', 'domain_id', project_id) + domain_ref_users = query(conn, "user", "domain_id", project_id) + domain_ref_local_users = query( + conn, "local_user", "domain_id", project_id + ) + delete(conn, "user", "domain_id", project_id) # Update project table - update(conn, table, 'id', project_id, project) + update(conn, table, "id", project_id, project) # Update saved records from project table and insert them back if domain_ref_projects: for domain_ref_project in domain_ref_projects: - domain_ref_project['domain_id'] = new_project_id - if domain_ref_project['parent_id'] == project_id: - domain_ref_project['parent_id'] = new_project_id - insert(conn, 'project', domain_ref_projects) + domain_ref_project["domain_id"] = new_project_id + if domain_ref_project["parent_id"] == project_id: + domain_ref_project["parent_id"] = new_project_id + insert(conn, "project", domain_ref_projects) if parent_ref_projects: for parent_ref_project in parent_ref_projects: - parent_ref_project['parent_id'] = new_project_id - if parent_ref_project['domain_id'] == project_id: - parent_ref_project['domain_id'] = new_project_id - insert(conn, 'project', parent_ref_projects) + parent_ref_project["parent_id"] = new_project_id + if parent_ref_project["domain_id"] == project_id: + parent_ref_project["domain_id"] = new_project_id + insert(conn, "project", parent_ref_projects) if domain_ref_users: for domain_ref_user in domain_ref_users: - domain_ref_user['domain_id'] = new_project_id - insert(conn, 'user', domain_ref_users) + domain_ref_user["domain_id"] = new_project_id + insert(conn, "user", domain_ref_users) if domain_ref_local_users: for domain_ref_local_user in domain_ref_local_users: - domain_ref_local_user['domain_id'] = new_project_id - insert(conn, 'local_user', domain_ref_local_users) + domain_ref_local_user["domain_id"] = new_project_id + insert(conn, "local_user", domain_ref_local_users) # Need to update the target_id in assignment table # if the project id is updated if project_id != new_project_id: - table = 'assignment' - assignment = {'target_id': new_project_id} - update(conn, table, 'target_id', project_id, assignment) + table = "assignment" + assignment = {"target_id": new_project_id} + update(conn, table, "target_id", project_id, assignment) return project_get(context, new_project_id) @@ -585,16 +592,17 @@ def project_update(context, project_id, payload): ################### + @require_context def role_get_all(context): result = [] with get_read_connection() as conn: # role table - roles = query(conn, 'role') + roles = query(conn, "role") for role in roles: - role_consolidated = {'role': role} + role_consolidated = {"role": role} result.append(role_consolidated) return result @@ -606,77 +614,75 @@ def role_get(context, role_id): with get_read_connection() as conn: # role table - roles = query(conn, 'role', 'id', role_id) + roles = query(conn, "role", "id", role_id) if not roles: raise exception.RoleNotFound(role_id=role_id) - result['role'] = roles[0] + result["role"] = roles[0] return result @require_admin_context def role_create(context, payload): - roles = [payload['role']] + roles = [payload["role"]] with get_write_connection() as conn: - insert(conn, 'role', roles) + insert(conn, "role", roles) - return role_get(context, payload['role']['id']) + return role_get(context, payload["role"]["id"]) 
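All of the identity CRUD hunks above sit on top of the query/insert/update/delete helpers and the TableRegistry reflection shown earlier in this file. A self-contained sketch of that reflection pattern, assuming SQLAlchemy 1.4+ and using an in-memory SQLite table as a stand-in for the keystone schema.

from sqlalchemy import MetaData, Table, create_engine, select

METADATA = MetaData()


def get_table(conn, tablename):
    # Reflect the table once and reuse it afterwards, as TableRegistry.get() does
    try:
        return METADATA.tables[tablename]
    except KeyError:
        return Table(tablename, METADATA, autoload_with=conn)


def query(conn, tablename, col=None, value=None):
    table = get_table(conn, tablename)
    stmt = select(table)
    if col is not None:
        stmt = stmt.where(table.c[col] == value)
    return [dict(row) for row in conn.execute(stmt).mappings()]


def insert(conn, tablename, rows):
    conn.execute(get_table(conn, tablename).insert(), rows)


if __name__ == "__main__":
    engine = create_engine("sqlite:///:memory:")
    with engine.begin() as conn:
        conn.exec_driver_sql("CREATE TABLE role (id TEXT PRIMARY KEY, name TEXT)")
        insert(conn, "role", [{"id": "r1", "name": "admin"}])
        print(query(conn, "role", "id", "r1"))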
@require_admin_context def role_update(context, role_id, payload): with get_write_connection() as conn: # role table - table = 'role' + table = "role" new_role_id = role_id if table in payload: prior_roles = [] implied_roles = [] role_options = [] role = payload[table] - new_role_id = role.get('id') + new_role_id = role.get("id") if role_id != new_role_id: # implied_role table has foreign key references to role table. # The foreign key references are on DELETE CASCADE only. To # avoid foreign key constraints violation, save these records # from implied_role table, delete them, update role table, # update and insert them back after role table is updated. - prior_roles = query(conn, 'implied_role', 'prior_role_id', - role_id) - delete(conn, 'implied_role', 'prior_role_id', role_id) - implied_roles = query(conn, 'implied_role', 'implied_role_id', - role_id) - delete(conn, 'implied_role', 'implied_role_id', role_id) + prior_roles = query(conn, "implied_role", "prior_role_id", role_id) + delete(conn, "implied_role", "prior_role_id", role_id) + implied_roles = query(conn, "implied_role", "implied_role_id", role_id) + delete(conn, "implied_role", "implied_role_id", role_id) # Delete the role_option record referencing to the old role_id # to avoid the foreign key constraint violation when we update # the role table in the next step. - role_options = query(conn, 'role_option', 'role_id', role_id) - delete(conn, 'role_option', 'role_id', role_id) + role_options = query(conn, "role_option", "role_id", role_id) + delete(conn, "role_option", "role_id", role_id) else: - role.pop('id', None) + role.pop("id", None) # Update role table - update(conn, table, 'id', role_id, role) + update(conn, table, "id", role_id, role) # Update saved records from implied_role table and insert them back if prior_roles: for prior_role in prior_roles: - prior_role['prior_role_id'] = new_role_id - insert(conn, 'implied_role', prior_roles) + prior_role["prior_role_id"] = new_role_id + insert(conn, "implied_role", prior_roles) if implied_roles: for implied_role in implied_roles: - implied_role['implied_role_id'] = new_role_id - insert(conn, 'implied_role', implied_roles) + implied_role["implied_role_id"] = new_role_id + insert(conn, "implied_role", implied_roles) if role_options: for role_option in role_options: - role_option['role_id'] = new_role_id - insert(conn, 'role_option', role_option) + role_option["role_id"] = new_role_id + insert(conn, "role_option", role_option) # Need to update the role_id in assignment and system_assignment tables # if the role id is updated if role_id != new_role_id: - assignment = {'role_id': new_role_id} - update(conn, 'assignment', 'role_id', role_id, assignment) - update(conn, 'system_assignment', 'role_id', role_id, assignment) + assignment = {"role_id": new_role_id} + update(conn, "assignment", "role_id", role_id, assignment) + update(conn, "system_assignment", "role_id", role_id, assignment) return role_get(context, new_role_id) @@ -687,16 +693,17 @@ def role_update(context, role_id, payload): ################################## + @require_context def revoke_event_get_all(context): result = [] with get_read_connection() as conn: # revocation_event table - revoke_events = query(conn, 'revocation_event') + revoke_events = query(conn, "revocation_event") for revoke_event in revoke_events: - revoke_event_consolidated = {'revocation_event': revoke_event} + revoke_event_consolidated = {"revocation_event": revoke_event} result.append(revoke_event_consolidated) return result @@ -708,11 +715,10 @@ 
def revoke_event_get_by_audit(context, audit_id): with get_read_connection() as conn: # revocation_event table - revoke_events = query(conn, 'revocation_event', 'audit_id', - audit_id) + revoke_events = query(conn, "revocation_event", "audit_id", audit_id) if not revoke_events: raise exception.RevokeEventNotFound() - result['revocation_event'] = revoke_events[0] + result["revocation_event"] = revoke_events[0] return result @@ -723,48 +729,49 @@ def revoke_event_get_by_user(context, user_id, issued_before): with get_read_connection() as conn: # revocation_event table - events = query(conn, 'revocation_event', 'user_id', user_id) - revoke_events = [event for event in events if - str(event['issued_before']) == issued_before] + events = query(conn, "revocation_event", "user_id", user_id) + revoke_events = [ + event for event in events if str(event["issued_before"]) == issued_before + ] if not revoke_events: raise exception.RevokeEventNotFound() - result['revocation_event'] = revoke_events[0] + result["revocation_event"] = revoke_events[0] return result @require_admin_context def revoke_event_create(context, payload): - revoke_event = payload['revocation_event'] + revoke_event = payload["revocation_event"] # ignore auto generated id - revoke_event.pop('id', None) + revoke_event.pop("id", None) revoke_events = [revoke_event] with get_write_connection() as conn: - insert(conn, 'revocation_event', revoke_events) + insert(conn, "revocation_event", revoke_events) result = {} - if revoke_event.get('audit_id') is not None: - result = revoke_event_get_by_audit(context, - revoke_event.get('audit_id')) - elif (revoke_event.get('user_id') is not None) and \ - (revoke_event.get('issued_before') is not None): - result = revoke_event_get_by_user(context, - revoke_event.get('user_id'), - revoke_event.get('issued_before')) + if revoke_event.get("audit_id") is not None: + result = revoke_event_get_by_audit(context, revoke_event.get("audit_id")) + elif (revoke_event.get("user_id") is not None) and ( + revoke_event.get("issued_before") is not None + ): + result = revoke_event_get_by_user( + context, revoke_event.get("user_id"), revoke_event.get("issued_before") + ) return result @require_admin_context def revoke_event_delete_by_audit(context, audit_id): with get_write_connection() as conn: - delete(conn, 'revocation_event', 'audit_id', audit_id) + delete(conn, "revocation_event", "audit_id", audit_id) @require_admin_context def revoke_event_delete_by_user(context, user_id, issued_before): result = revoke_event_get_by_user(context, user_id, issued_before) - event_id = result['revocation_event']['id'] + event_id = result["revocation_event"]["id"] with get_write_connection() as conn: - delete(conn, 'revocation_event', 'id', event_id) + delete(conn, "revocation_event", "id", event_id) diff --git a/distributedcloud/dcdbsync/db/identity/utils.py b/distributedcloud/dcdbsync/db/identity/utils.py index f32f1b80e..dcf2f54ab 100644 --- a/distributedcloud/dcdbsync/db/identity/utils.py +++ b/distributedcloud/dcdbsync/db/identity/utils.py @@ -13,7 +13,7 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019, 2022 Wind River Systems, Inc. +# Copyright (c) 2019, 2022, 2024 Wind River Systems, Inc. 
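The revoke_event functions above, like the rest of this module, are gated by the require_context / require_admin_context decorators defined near the top of the file. A stand-alone sketch of that guard pattern with a stub context object; the real code checks an oslo.context-based RequestContext and raises dcdbsync's own exceptions.

import functools


class FakeContext(object):
    def __init__(self, is_admin=False):
        self.is_admin = is_admin


class AdminRequired(Exception):
    pass


def require_admin_context(f):
    """Ensure the first positional argument is an admin request context."""

    @functools.wraps(f)
    def wrapper(context, *args, **kwargs):
        if not context or not context.is_admin:
            raise AdminRequired()
        return f(context, *args, **kwargs)

    return wrapper


@require_admin_context
def revoke_event_delete(context, event_id):
    return "deleted %s" % event_id


print(revoke_event_delete(FakeContext(is_admin=True), "e1"))   # prints: deleted e1
# revoke_event_delete(FakeContext(is_admin=False), "e1")       # raises AdminRequired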
# # SPDX-License-Identifier: Apache-2.0 # @@ -50,8 +50,8 @@ class LazyPluggable(object): return getattr(backend, key) -IMPL = LazyPluggable('backend', sqlalchemy='dcdbsync.db.sqlalchemy.api') +IMPL = LazyPluggable("backend", sqlalchemy="dcdbsync.db.sqlalchemy.api") -def purge_deleted(age, granularity='days'): +def purge_deleted(age, granularity="days"): IMPL.purge_deleted(age, granularity) diff --git a/distributedcloud/dcdbsync/dbsyncclient/base.py b/distributedcloud/dcdbsync/dbsyncclient/base.py index d2ab7dec4..e314929e2 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/base.py +++ b/distributedcloud/dcdbsync/dbsyncclient/base.py @@ -26,7 +26,7 @@ from dcdbsync.dbsyncclient import exceptions class Resource(object): # This will be overridden by the actual resource - resource_name = 'Something' + resource_name = "Something" class ResourceManager(object): @@ -40,9 +40,11 @@ class ResourceManager(object): resource = [] for json_object in json_objects: for resource_data in json_object: - resource.append(self.resource_class( # pylint: disable=E1102 - self, resource_data, - json_object[resource_data])) + resource.append( + self.resource_class( # pylint: disable=E1102 + self, resource_data, json_object[resource_data] + ) + ) return resource def _list(self, url, response_key=None): @@ -75,11 +77,15 @@ class ResourceManager(object): json_objects = [json_response_key[item] for item in json_response_key] resource = [] for json_object in json_objects: - for values in json_object.get('usage').keys(): - resource.append(self.resource_class( # pylint: disable=E1102 - self, values, - json_object['limits'][values], - json_object['usage'][values])) + for values in json_object.get("usage").keys(): + resource.append( + self.resource_class( # pylint: disable=E1102 + self, + values, + json_object["limits"][values], + json_object["usage"][values], + ) + ) return resource def _delete(self, url): @@ -89,23 +95,24 @@ class ResourceManager(object): def _raise_api_exception(self, resp): error_html = resp.content - soup = BeautifulSoup(error_html, 'html.parser') + soup = BeautifulSoup(error_html, "html.parser") # Get the raw html with get_text, strip out the blank lines on # front and back, then get rid of the 2 lines of error code number # and error code explanation so that we are left with just the # meaningful error text. 
try: - error_msg = soup.body.get_text().lstrip().rstrip().split('\n')[2] + error_msg = soup.body.get_text().lstrip().rstrip().split("\n")[2] except Exception: error_msg = resp.content - raise exceptions.APIException(error_code=resp.status_code, - error_message=error_msg) + raise exceptions.APIException( + error_code=resp.status_code, error_message=error_msg + ) def get_json(response): """Get JSON representation of response.""" - json_field_or_function = getattr(response, 'json', None) + json_field_or_function = getattr(response, "json", None) if callable(json_field_or_function): return response.json() else: diff --git a/distributedcloud/dcdbsync/dbsyncclient/client.py b/distributedcloud/dcdbsync/dbsyncclient/client.py index 0dd83a72e..f6ba2a8dc 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/client.py +++ b/distributedcloud/dcdbsync/dbsyncclient/client.py @@ -20,14 +20,28 @@ from dcdbsync.dbsyncclient.v1 import client as client_v1 -def Client(dbsync_agent_url=None, username=None, api_key=None, - project_name=None, auth_url=None, project_id=None, - endpoint_type='publicURL', service_type='dcorch-dbsync', - auth_token=None, user_id=None, cacert=None, insecure=False, - profile=None, auth_type='keystone', client_id=None, - client_secret=None, session=None, **kwargs): +def Client( + dbsync_agent_url=None, + username=None, + api_key=None, + project_name=None, + auth_url=None, + project_id=None, + endpoint_type="publicURL", + service_type="dcorch-dbsync", + auth_token=None, + user_id=None, + cacert=None, + insecure=False, + profile=None, + auth_type="keystone", + client_id=None, + client_secret=None, + session=None, + **kwargs +): if dbsync_agent_url and not isinstance(dbsync_agent_url, str): - raise RuntimeError('DC DBsync agent url should be a string.') + raise RuntimeError("DC DBsync agent url should be a string.") return client_v1.Client( dbsync_agent_url=dbsync_agent_url, diff --git a/distributedcloud/dcdbsync/dbsyncclient/exceptions.py b/distributedcloud/dcdbsync/dbsyncclient/exceptions.py index 63a0a8558..beaaabea6 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/exceptions.py +++ b/distributedcloud/dcdbsync/dbsyncclient/exceptions.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019-2020 Wind River Systems, Inc. +# Copyright (c) 2019-2020, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -24,6 +24,7 @@ class DBsyncClientException(Exception): To correctly use this class, inherit from it and define a 'message' and 'code' properties. 
""" + message = "An unknown exception occurred" code = "UNKNOWN_EXCEPTION" @@ -33,7 +34,8 @@ class DBsyncClientException(Exception): def __init__(self, message=message): self.message = message super(DBsyncClientException, self).__init__( - '%s: %s' % (self.code, self.message)) + "%s: %s" % (self.code, self.message) + ) class IllegalArgumentException(DBsyncClientException): diff --git a/distributedcloud/dcdbsync/dbsyncclient/httpclient.py b/distributedcloud/dcdbsync/dbsyncclient/httpclient.py index 0a0a4ca5d..43bf7f2c9 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/httpclient.py +++ b/distributedcloud/dcdbsync/dbsyncclient/httpclient.py @@ -36,16 +36,23 @@ LOG = logging.getLogger(__name__) def log_request(func): def decorator(self, *args, **kwargs): resp = func(self, *args, **kwargs) - LOG.debug("HTTP %s %s %d" % (resp.request.method, resp.url, - resp.status_code)) + LOG.debug("HTTP %s %s %d" % (resp.request.method, resp.url, resp.status_code)) return resp return decorator class HTTPClient(object): - def __init__(self, base_url, token=None, project_id=None, user_id=None, - cacert=None, insecure=False, request_timeout=None): + def __init__( + self, + base_url, + token=None, + project_id=None, + user_id=None, + cacert=None, + insecure=False, + request_timeout=None, + ): self.base_url = base_url self.token = token self.project_id = project_id @@ -53,117 +60,117 @@ class HTTPClient(object): self.ssl_options = {} self.request_timeout = request_timeout - if self.base_url.startswith('https'): + if self.base_url.startswith("https"): if cacert and not os.path.exists(cacert): - raise ValueError('Unable to locate cacert file ' - 'at %s.' % cacert) + raise ValueError("Unable to locate cacert file at %s." % cacert) if cacert and insecure: - LOG.warning('Client is set to not verify even though ' - 'cacert is provided.') + LOG.warning( + "Client is set to not verify even though cacert is provided." 
+ ) - self.ssl_options['verify'] = not insecure - self.ssl_options['cert'] = cacert + self.ssl_options["verify"] = not insecure + self.ssl_options["cert"] = cacert @log_request def get(self, url, headers=None): - options = self._get_request_options('get', headers) + options = self._get_request_options("get", headers) try: url = self.base_url + url timeout = self.request_timeout return requests.get(url, timeout=timeout, **options) except requests.exceptions.Timeout: - msg = 'Request to %s timed out' % url + msg = "Request to %s timed out" % url raise exceptions.ConnectTimeout(msg) except requests.exceptions.ConnectionError as e: - msg = 'Unable to establish connection to %s: %s' % (url, e) + msg = "Unable to establish connection to %s: %s" % (url, e) raise exceptions.ConnectFailure(msg) except requests.exceptions.RequestException as e: - msg = 'Unexpected exception for %s: %s' % (url, e) + msg = "Unexpected exception for %s: %s" % (url, e) raise exceptions.UnknownConnectionError(msg) @log_request def post(self, url, body, headers=None): - options = self._get_request_options('post', headers) + options = self._get_request_options("post", headers) try: url = self.base_url + url timeout = self.request_timeout return requests.post(url, body, timeout=timeout, **options) except requests.exceptions.Timeout: - msg = 'Request to %s timed out' % url + msg = "Request to %s timed out" % url raise exceptions.ConnectTimeout(msg) except requests.exceptions.ConnectionError as e: - msg = 'Unable to establish connection to %s: %s' % (url, e) + msg = "Unable to establish connection to %s: %s" % (url, e) raise exceptions.ConnectFailure(msg) except requests.exceptions.RequestException as e: - msg = 'Unexpected exception for %s: %s' % (url, e) + msg = "Unexpected exception for %s: %s" % (url, e) raise exceptions.UnknownConnectionError(msg) @log_request def put(self, url, body, headers=None): - options = self._get_request_options('put', headers) + options = self._get_request_options("put", headers) try: url = self.base_url + url timeout = self.request_timeout return requests.put(url, body, timeout=timeout, **options) except requests.exceptions.Timeout: - msg = 'Request to %s timed out' % url + msg = "Request to %s timed out" % url raise exceptions.ConnectTimeout(msg) except requests.exceptions.ConnectionError as e: - msg = 'Unable to establish connection to %s: %s' % (url, e) + msg = "Unable to establish connection to %s: %s" % (url, e) raise exceptions.ConnectFailure(msg) except requests.exceptions.RequestException as e: - msg = 'Unexpected exception for %s: %s' % (url, e) + msg = "Unexpected exception for %s: %s" % (url, e) raise exceptions.UnknownConnectionError(msg) @log_request def patch(self, url, body, headers=None): - options = self._get_request_options('patch', headers) + options = self._get_request_options("patch", headers) try: url = self.base_url + url timeout = self.request_timeout return requests.patch(url, body, timeout=timeout, **options) except requests.exceptions.Timeout: - msg = 'Request to %s timed out' % url + msg = "Request to %s timed out" % url raise exceptions.ConnectTimeout(msg) except requests.exceptions.ConnectionError as e: - msg = 'Unable to establish connection to %s: %s' % (url, e) + msg = "Unable to establish connection to %s: %s" % (url, e) raise exceptions.ConnectFailure(msg) except requests.exceptions.RequestException as e: - msg = 'Unexpected exception for %s: %s' % (url, e) + msg = "Unexpected exception for %s: %s" % (url, e) raise exceptions.UnknownConnectionError(msg) 
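Every verb method in HTTPClient above has the same shape: prepend the base URL, pass the configured timeout, and translate requests exceptions into dbsyncclient ones. A condensed sketch of that shape for GET only; the exception classes here are local stand-ins for dcdbsync.dbsyncclient.exceptions.

import functools
import logging

import requests

LOG = logging.getLogger(__name__)


class ConnectTimeout(Exception):
    pass


class ConnectFailure(Exception):
    pass


def log_request(func):
    @functools.wraps(func)
    def decorator(self, *args, **kwargs):
        resp = func(self, *args, **kwargs)
        LOG.debug("HTTP %s %s %d", resp.request.method, resp.url, resp.status_code)
        return resp

    return decorator


class MiniHTTPClient(object):
    def __init__(self, base_url, token=None, request_timeout=15):
        self.base_url = base_url
        self.token = token
        self.request_timeout = request_timeout

    @log_request
    def get(self, url, headers=None):
        headers = dict(headers or {})
        if self.token:
            headers.setdefault("x-auth-token", self.token)
        full_url = self.base_url + url
        try:
            return requests.get(full_url, headers=headers, timeout=self.request_timeout)
        except requests.exceptions.Timeout:
            raise ConnectTimeout("Request to %s timed out" % full_url)
        except requests.exceptions.ConnectionError as e:
            raise ConnectFailure("Unable to establish connection to %s: %s" % (full_url, e))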
@log_request def delete(self, url, headers=None): - options = self._get_request_options('delete', headers) + options = self._get_request_options("delete", headers) try: url = self.base_url + url timeout = self.request_timeout return requests.delete(url, timeout=timeout, **options) except requests.exceptions.Timeout: - msg = 'Request to %s timed out' % url + msg = "Request to %s timed out" % url raise exceptions.ConnectTimeout(msg) except requests.exceptions.ConnectionError as e: - msg = 'Unable to establish connection to %s: %s' % (url, e) + msg = "Unable to establish connection to %s: %s" % (url, e) raise exceptions.ConnectFailure(msg) except requests.exceptions.RequestException as e: - msg = 'Unexpected exception for %s: %s' % (url, e) + msg = "Unexpected exception for %s: %s" % (url, e) raise exceptions.UnknownConnectionError(msg) def _get_request_options(self, method, headers): headers = self._update_headers(headers) - if method in ['post', 'put', 'patch']: - content_type = headers.get('content-type', 'application/json') - headers['content-type'] = content_type + if method in ["post", "put", "patch"]: + content_type = headers.get("content-type", "application/json") + headers["content-type"] = content_type options = copy.deepcopy(self.ssl_options) - options['headers'] = headers + options["headers"] = headers return options @@ -171,17 +178,17 @@ class HTTPClient(object): if not headers: headers = {} - token = headers.get('x-auth-token', self.token) + token = headers.get("x-auth-token", self.token) if token: - headers['x-auth-token'] = token + headers["x-auth-token"] = token - project_id = headers.get('X-Project-Id', self.project_id) + project_id = headers.get("X-Project-Id", self.project_id) if project_id: - headers['X-Project-Id'] = project_id + headers["X-Project-Id"] = project_id - user_id = headers.get('X-User-Id', self.user_id) + user_id = headers.get("X-User-Id", self.user_id) if user_id: - headers['X-User-Id'] = user_id + headers["X-User-Id"] = user_id # Add headers for osprofiler. 
if osprofiler_web: diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/client.py b/distributedcloud/dcdbsync/dbsyncclient/v1/client.py index 5740e64c4..d344c2478 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/client.py +++ b/distributedcloud/dcdbsync/dbsyncclient/v1/client.py @@ -28,8 +28,7 @@ from dcdbsync.dbsyncclient.v1.identity import identity_group_manager as igm from dcdbsync.dbsyncclient.v1.identity import identity_user_manager as ium from dcdbsync.dbsyncclient.v1.identity import project_manager as pm from dcdbsync.dbsyncclient.v1.identity import role_manager as rm -from dcdbsync.dbsyncclient.v1.identity \ - import token_revoke_event_manager as trem +from dcdbsync.dbsyncclient.v1.identity import token_revoke_event_manager as trem osprofiler_profiler = importutils.try_import("osprofiler.profiler") @@ -41,40 +40,53 @@ _DEFAULT_REQUEST_TIMEOUT = 15 class Client(object): """Class where the communication from KB to Keystone happens.""" - def __init__(self, dbsync_agent_url=None, username=None, api_key=None, - project_name=None, auth_url=None, project_id=None, - endpoint_type='publicURL', service_type='dcorch-dbsync', - auth_token=None, user_id=None, cacert=None, insecure=False, - profile=None, auth_type='keystone', client_id=None, - client_secret=None, session=None, **kwargs): + def __init__( + self, + dbsync_agent_url=None, + username=None, + api_key=None, + project_name=None, + auth_url=None, + project_id=None, + endpoint_type="publicURL", + service_type="dcorch-dbsync", + auth_token=None, + user_id=None, + cacert=None, + insecure=False, + profile=None, + auth_type="keystone", + client_id=None, + client_secret=None, + session=None, + **kwargs + ): """Communicates with Keystone to fetch necessary values.""" if dbsync_agent_url and not isinstance(dbsync_agent_url, str): - raise RuntimeError('DC DBsync agent url should be a string.') + raise RuntimeError("DC DBsync agent url should be a string.") if auth_url or session: - if auth_type == 'keystone': - (dbsync_agent_url, auth_token, project_id, user_id) = ( - authenticate( - dbsync_agent_url, - username, - api_key, - project_name, - auth_url, - project_id, - endpoint_type, - service_type, - auth_token, - user_id, - session, - cacert, - insecure, - **kwargs - ) + if auth_type == "keystone": + (dbsync_agent_url, auth_token, project_id, user_id) = authenticate( + dbsync_agent_url, + username, + api_key, + project_name, + auth_url, + project_id, + endpoint_type, + service_type, + auth_token, + user_id, + session, + cacert, + insecure, + **kwargs ) else: raise RuntimeError( - 'Invalid authentication type [value=%s, valid_values=%s]' - % (auth_type, 'keystone') + "Invalid authentication type [value=%s, valid_values=%s]" + % (auth_type, "keystone") ) if not dbsync_agent_url: @@ -103,39 +115,44 @@ class Client(object): # update to get a new token def update(self, session=None): if session: - (dbsync_agent_url, auth_token, project_id, user_id) = ( - authenticate( - auth_url=session.auth.auth_url, - username=session.auth._username, - api_key=session.auth._password, - project_name=session.auth._project_name, - user_domain_name=session.auth._user_domain_name, - project_domain_name=session.auth._project_domain_name, - ) + (dbsync_agent_url, auth_token, project_id, user_id) = authenticate( + auth_url=session.auth.auth_url, + username=session.auth._username, + api_key=session.auth._password, + project_name=session.auth._project_name, + user_domain_name=session.auth._user_domain_name, + project_domain_name=session.auth._project_domain_name, ) 
self.http_client.token = auth_token -def authenticate(dbsync_agent_url=None, username=None, - api_key=None, project_name=None, auth_url=None, - project_id=None, endpoint_type='internalURL', - service_type='dcorch-dbsync', auth_token=None, user_id=None, - session=None, cacert=None, insecure=False, **kwargs): +def authenticate( + dbsync_agent_url=None, + username=None, + api_key=None, + project_name=None, + auth_url=None, + project_id=None, + endpoint_type="internalURL", + service_type="dcorch-dbsync", + auth_token=None, + user_id=None, + session=None, + cacert=None, + insecure=False, + **kwargs +): """Get token, project_id, user_id and Endpoint.""" if project_name and project_id: - raise RuntimeError( - 'Only project name or project id should be set' - ) + raise RuntimeError("Only project name or project id should be set") if username and user_id: - raise RuntimeError( - 'Only user name or user id should be set' - ) - user_domain_name = kwargs.get('user_domain_name') - user_domain_id = kwargs.get('user_domain_id') - project_domain_name = kwargs.get('project_domain_name') - project_domain_id = kwargs.get('project_domain_id') + raise RuntimeError("Only user name or user id should be set") + user_domain_name = kwargs.get("user_domain_name") + user_domain_id = kwargs.get("user_domain_id") + project_domain_name = kwargs.get("project_domain_name") + project_domain_id = kwargs.get("project_domain_id") if session is None: if auth_token: @@ -159,11 +176,14 @@ def authenticate(dbsync_agent_url=None, username=None, user_domain_name=user_domain_name, user_domain_id=user_domain_id, project_domain_name=project_domain_name, - project_domain_id=project_domain_id) + project_domain_id=project_domain_id, + ) else: - raise RuntimeError('You must either provide a valid token or' - 'a password (api_key) and a user.') + raise RuntimeError( + "You must either provide a valid token or a password (api_key) " + "and a user." + ) if auth: session = ks_session.Session(auth=auth) @@ -173,7 +193,7 @@ def authenticate(dbsync_agent_url=None, username=None, user_id = session.get_user_id() if not dbsync_agent_url: dbsync_agent_url = session.get_endpoint( - service_type=service_type, - interface=endpoint_type) + service_type=service_type, interface=endpoint_type + ) return dbsync_agent_url, token, project_id, user_id diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_group_manager.py b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_group_manager.py index e952b2aba..21b334e73 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_group_manager.py +++ b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_group_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019-2021 Wind River Systems, Inc. +# Copyright (c) 2019-2021, 2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -25,10 +25,11 @@ from dcdbsync.dbsyncclient import exceptions class Group(base.Resource): - resource_name = 'group' + resource_name = "group" - def __init__(self, manager, id, domain_id, name, - description, local_user_ids, extra={}): + def __init__( + self, manager, id, domain_id, name, description, local_user_ids, extra={} + ): self.manager = manager self.id = id self.domain_id = domain_id @@ -39,10 +40,15 @@ class Group(base.Resource): def info(self): resource_info = dict() - resource_info.update({self.resource_name: - {'name': self.name, - 'id': self.id, - 'domain_id': self.domain_id}}) + resource_info.update( + { + self.resource_name: { + "name": self.name, + "id": self.id, + "domain_id": self.domain_id, + } + } + ) return resource_info @@ -55,7 +61,7 @@ class identity_group_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 201: self._raise_api_exception(resp) @@ -68,7 +74,7 @@ class identity_group_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -79,12 +85,13 @@ class identity_group_manager(base.ResourceManager): for json_object in json_objects: group = Group( self, - id=json_object['group']['id'], - domain_id=json_object['group']['domain_id'], - name=json_object['group']['name'], - extra=json_object['group']['extra'], - description=json_object['group']['description'], - local_user_ids=json_object['local_user_ids']) + id=json_object["group"]["id"], + domain_id=json_object["group"]["domain_id"], + name=json_object["group"]["name"], + extra=json_object["group"]["extra"], + description=json_object["group"]["description"], + local_user_ids=json_object["local_user_ids"], + ) groups.append(group) @@ -95,7 +102,7 @@ class identity_group_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 200: self._raise_api_exception(resp) @@ -108,7 +115,7 @@ class identity_group_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 200: self._raise_api_exception(resp) @@ -117,17 +124,17 @@ class identity_group_manager(base.ResourceManager): return json_object def add_group(self, data): - url = '/identity/groups/' + url = "/identity/groups/" return self.group_create(url, data) def list_groups(self): - url = '/identity/groups/' + url = "/identity/groups/" return self.group_list(url) def group_detail(self, group_ref): - url = '/identity/groups/%s' % group_ref + url = "/identity/groups/%s" % group_ref return self._group_detail(url) def update_group(self, group_ref, data): - url = '/identity/groups/%s' % group_ref + url = "/identity/groups/%s" % group_ref return self._group_update(url, data) diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_user_manager.py b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_user_manager.py index f563fe764..18e767a2f 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_user_manager.py +++ 
b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/identity_user_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -25,11 +25,20 @@ from dcdbsync.dbsyncclient import exceptions class Password(base.Resource): - resource_name = 'password' + resource_name = "password" - def __init__(self, manager, id, local_user_id, self_service, - password_hash, created_at, created_at_int, expires_at, - expires_at_int): + def __init__( + self, + manager, + id, + local_user_id, + self_service, + password_hash, + created_at, + created_at_int, + expires_at, + expires_at_int, + ): self.manager = manager self.id = id # Foreign key to local_user.id @@ -43,11 +52,19 @@ class Password(base.Resource): class LocalUser(base.Resource): - resource_name = 'localUser' + resource_name = "localUser" - def __init__(self, manager, id, domain_id, name, user_id, - failed_auth_count, failed_auth_at, - passwords=[]): + def __init__( + self, + manager, + id, + domain_id, + name, + user_id, + failed_auth_count, + failed_auth_at, + passwords=[], + ): self.manager = manager self.id = id self.domain_id = domain_id @@ -59,11 +76,20 @@ class LocalUser(base.Resource): class User(base.Resource): - resource_name = 'user' + resource_name = "user" - def __init__(self, manager, id, domain_id, default_project_id, - enabled, created_at, last_active_at, local_user, - extra={}): + def __init__( + self, + manager, + id, + domain_id, + default_project_id, + enabled, + created_at, + last_active_at, + local_user, + extra={}, + ): self.manager = manager self.id = id self.domain_id = domain_id @@ -76,10 +102,15 @@ class User(base.Resource): def info(self): resource_info = dict() - resource_info.update({self.resource_name: - {'name': self.local_user.name, - 'id': self.id, - 'domain_id': self.domain_id}}) + resource_info.update( + { + self.resource_name: { + "name": self.local_user.name, + "id": self.id, + "domain_id": self.domain_id, + } + } + ) return resource_info @@ -92,7 +123,7 @@ class identity_user_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 201: self._raise_api_exception(resp) @@ -105,7 +136,7 @@ class identity_user_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 200: self._raise_api_exception(resp) @@ -115,43 +146,45 @@ class identity_user_manager(base.ResourceManager): users = [] for json_object in json_objects: passwords = [] - for object in json_object['password']: + for object in json_object["password"]: # skip empty password if not object: continue password = Password( self, - id=object['id'], - local_user_id=object['local_user_id'], - self_service=object['self_service'], - password_hash=object['password_hash'], - created_at=object['created_at'], - created_at_int=object['created_at_int'], - expires_at=object['expires_at'], - expires_at_int=object['expires_at_int']) + id=object["id"], + local_user_id=object["local_user_id"], + self_service=object["self_service"], + password_hash=object["password_hash"], + created_at=object["created_at"], + 
created_at_int=object["created_at_int"], + expires_at=object["expires_at"], + expires_at_int=object["expires_at_int"], + ) passwords.append(password) local_user = LocalUser( self, - id=json_object['local_user']['id'], - domain_id=json_object['local_user']['domain_id'], - name=json_object['local_user']['name'], - user_id=json_object['local_user']['user_id'], - failed_auth_count=json_object['local_user'][ - 'failed_auth_count'], - failed_auth_at=json_object['local_user']['failed_auth_at'], - passwords=passwords) + id=json_object["local_user"]["id"], + domain_id=json_object["local_user"]["domain_id"], + name=json_object["local_user"]["name"], + user_id=json_object["local_user"]["user_id"], + failed_auth_count=json_object["local_user"]["failed_auth_count"], + failed_auth_at=json_object["local_user"]["failed_auth_at"], + passwords=passwords, + ) user = User( self, - id=json_object['user']['id'], - domain_id=json_object['user']['domain_id'], - default_project_id=json_object['user']['default_project_id'], - enabled=json_object['user']['enabled'], - created_at=json_object['user']['created_at'], - last_active_at=json_object['user']['last_active_at'], - extra=json_object['user']['extra'], - local_user=local_user) + id=json_object["user"]["id"], + domain_id=json_object["user"]["domain_id"], + default_project_id=json_object["user"]["default_project_id"], + enabled=json_object["user"]["enabled"], + created_at=json_object["user"]["created_at"], + last_active_at=json_object["user"]["last_active_at"], + extra=json_object["user"]["extra"], + local_user=local_user, + ) users.append(user) @@ -162,7 +195,7 @@ class identity_user_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 200: self._raise_api_exception(resp) @@ -175,7 +208,7 @@ class identity_user_manager(base.ResourceManager): # Unauthorized request if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request.') + raise exceptions.Unauthorized("Unauthorized request.") if resp.status_code != 200: self._raise_api_exception(resp) @@ -184,17 +217,17 @@ class identity_user_manager(base.ResourceManager): return json_object def add_user(self, data): - url = '/identity/users/' + url = "/identity/users/" return self.user_create(url, data) def list_users(self): - url = '/identity/users/' + url = "/identity/users/" return self.users_list(url) def user_detail(self, user_ref): - url = '/identity/users/%s' % user_ref + url = "/identity/users/%s" % user_ref return self._user_detail(url) def update_user(self, user_ref, data): - url = '/identity/users/%s' % user_ref + url = "/identity/users/%s" % user_ref return self._user_update(url, data) diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/project_manager.py b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/project_manager.py index 75b0b6358..ef84efd83 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/project_manager.py +++ b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/project_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. 
# # SPDX-License-Identifier: Apache-2.0 # @@ -25,11 +25,20 @@ from dcdbsync.dbsyncclient import exceptions class Project(base.Resource): - resource_name = 'project' + resource_name = "project" - def __init__(self, manager, id, domain_id, name, - enabled, parent_id, is_domain, extra={}, - description=""): + def __init__( + self, + manager, + id, + domain_id, + name, + enabled, + parent_id, + is_domain, + extra={}, + description="", + ): self.manager = manager self.id = id self.domain_id = domain_id @@ -42,10 +51,15 @@ class Project(base.Resource): def info(self): resource_info = dict() - resource_info.update({self.resource_name: - {'name': self.name, - 'id': self.id, - 'domain_id': self.domain_id}}) + resource_info.update( + { + self.resource_name: { + "name": self.name, + "id": self.id, + "domain_id": self.domain_id, + } + } + ) return resource_info @@ -57,7 +71,7 @@ class project_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 201: self._raise_api_exception(resp) @@ -70,7 +84,7 @@ class project_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -79,17 +93,18 @@ class project_manager(base.ResourceManager): projects = [] for json_object in json_objects: - json_object = json_object['project'] + json_object = json_object["project"] project = Project( self, - id=json_object['id'], - domain_id=json_object['domain_id'], - name=json_object['name'], - extra=json_object['extra'], - description=json_object['description'], - enabled=json_object['enabled'], - parent_id=json_object['parent_id'], - is_domain=json_object['is_domain']) + id=json_object["id"], + domain_id=json_object["domain_id"], + name=json_object["name"], + extra=json_object["extra"], + description=json_object["description"], + enabled=json_object["enabled"], + parent_id=json_object["parent_id"], + is_domain=json_object["is_domain"], + ) projects.append(project) @@ -100,7 +115,7 @@ class project_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -113,7 +128,7 @@ class project_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -122,17 +137,17 @@ class project_manager(base.ResourceManager): return json_object def add_project(self, data): - url = '/identity/projects/' + url = "/identity/projects/" return self.project_create(url, data) def list_projects(self): - url = '/identity/projects/' + url = "/identity/projects/" return self.projects_list(url) def project_detail(self, project_ref): - url = '/identity/projects/%s' % project_ref + url = "/identity/projects/%s" % project_ref return self._project_detail(url) def update_project(self, project_ref, data): - url = '/identity/projects/%s' % project_ref + url = "/identity/projects/%s" % project_ref return self._project_update(url, data) diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/role_manager.py 
b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/role_manager.py index 0c44399f3..721899882 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/role_manager.py +++ b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/role_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -24,7 +24,7 @@ from dcdbsync.dbsyncclient import exceptions class Role(base.Resource): - resource_name = 'role' + resource_name = "role" def __init__(self, manager, id, domain_id, name, description, extra={}): self.manager = manager @@ -36,10 +36,15 @@ class Role(base.Resource): def info(self): resource_info = dict() - resource_info.update({self.resource_name: - {'name': self.name, - 'id': self.id, - 'domain_id': self.domain_id}}) + resource_info.update( + { + self.resource_name: { + "name": self.name, + "id": self.id, + "domain_id": self.domain_id, + } + } + ) return resource_info @@ -51,7 +56,7 @@ class role_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 201: self._raise_api_exception(resp) @@ -64,7 +69,7 @@ class role_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -73,14 +78,15 @@ class role_manager(base.ResourceManager): roles = [] for json_object in json_objects: - json_object = json_object.get('role') + json_object = json_object.get("role") role = Role( self, - id=json_object['id'], - domain_id=json_object['domain_id'], - name=json_object['name'], - description=json_object['description'], - extra=json_object['extra']) + id=json_object["id"], + domain_id=json_object["domain_id"], + name=json_object["name"], + description=json_object["description"], + extra=json_object["extra"], + ) roles.append(role) @@ -91,7 +97,7 @@ class role_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -104,7 +110,7 @@ class role_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -113,17 +119,17 @@ class role_manager(base.ResourceManager): return json_object def add_role(self, data): - url = '/identity/roles/' + url = "/identity/roles/" return self.role_create(url, data) def list_roles(self): - url = '/identity/roles/' + url = "/identity/roles/" return self.roles_list(url) def role_detail(self, role_ref): - url = '/identity/roles/%s' % role_ref + url = "/identity/roles/%s" % role_ref return self._role_detail(url) def update_role(self, role_ref, data): - url = '/identity/roles/%s' % role_ref + url = "/identity/roles/%s" % role_ref return self._role_update(url, data) diff --git a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/token_revoke_event_manager.py b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/token_revoke_event_manager.py index 
f9884e509..5a9e772b7 100644 --- a/distributedcloud/dcdbsync/dbsyncclient/v1/identity/token_revoke_event_manager.py +++ b/distributedcloud/dcdbsync/dbsyncclient/v1/identity/token_revoke_event_manager.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # @@ -25,11 +25,25 @@ from dcdbsync.dbsyncclient import exceptions class RevokeEvent(base.Resource): - resource_name = 'token_revoke_event' + resource_name = "token_revoke_event" - def __init__(self, manager, id, domain_id, project_id, user_id, role_id, - trust_id, consumer_id, access_token_id, issued_before, - expires_at, revoked_at, audit_id, audit_chain_id): + def __init__( + self, + manager, + id, + domain_id, + project_id, + user_id, + role_id, + trust_id, + consumer_id, + access_token_id, + issued_before, + expires_at, + revoked_at, + audit_id, + audit_chain_id, + ): self.manager = manager self.id = id self.domain_id = domain_id @@ -47,13 +61,18 @@ class RevokeEvent(base.Resource): def info(self): resource_info = dict() - resource_info.update({self.resource_name: - {'id': self.id, - 'project_id': self.project_id, - 'user_id': self.user_id, - 'role_id': self.role_id, - 'audit_id': self.audit_id, - 'issued_before': self.issued_before}}) + resource_info.update( + { + self.resource_name: { + "id": self.id, + "project_id": self.project_id, + "user_id": self.user_id, + "role_id": self.role_id, + "audit_id": self.audit_id, + "issued_before": self.issued_before, + } + } + ) return resource_info @@ -65,7 +84,7 @@ class revoke_event_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 201: self._raise_api_exception(resp) @@ -78,7 +97,7 @@ class revoke_event_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -87,22 +106,23 @@ class revoke_event_manager(base.ResourceManager): revoke_events = [] for json_object in json_objects: - json_object = json_object.get('revocation_event') + json_object = json_object.get("revocation_event") revoke_event = RevokeEvent( self, - id=json_object['id'], - domain_id=json_object['domain_id'], - project_id=json_object['project_id'], - user_id=json_object['user_id'], - role_id=json_object['role_id'], - trust_id=json_object['trust_id'], - consumer_id=json_object['consumer_id'], - access_token_id=json_object['access_token_id'], - issued_before=json_object['issued_before'], - expires_at=json_object['expires_at'], - revoked_at=json_object['revoked_at'], - audit_id=json_object['audit_id'], - audit_chain_id=json_object['audit_chain_id']) + id=json_object["id"], + domain_id=json_object["domain_id"], + project_id=json_object["project_id"], + user_id=json_object["user_id"], + role_id=json_object["role_id"], + trust_id=json_object["trust_id"], + consumer_id=json_object["consumer_id"], + access_token_id=json_object["access_token_id"], + issued_before=json_object["issued_before"], + expires_at=json_object["expires_at"], + revoked_at=json_object["revoked_at"], + audit_id=json_object["audit_id"], + audit_chain_id=json_object["audit_chain_id"], + ) 
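# Each "revocation_event" payload parsed above is wrapped in a RevokeEvent
# resource and accumulated in revoke_events just below, mirroring how the
# other identity managers build their result lists.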
revoke_events.append(revoke_event) @@ -113,7 +133,7 @@ class revoke_event_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") if resp.status_code != 200: self._raise_api_exception(resp) @@ -126,41 +146,41 @@ class revoke_event_manager(base.ResourceManager): # Unauthorized if resp.status_code == 401: - raise exceptions.Unauthorized('Unauthorized request') + raise exceptions.Unauthorized("Unauthorized request") # NotFound if resp.status_code == 404: - raise exceptions.NotFound('Requested item not found') + raise exceptions.NotFound("Requested item not found") if resp.status_code != 204: self._raise_api_exception(resp) def add_revoke_event(self, data): - url = '/identity/token-revocation-events/' + url = "/identity/token-revocation-events/" return self.revoke_event_create(url, data) def list_revoke_events(self): - url = '/identity/token-revocation-events/' + url = "/identity/token-revocation-events/" return self.revoke_events_list(url) def revoke_event_detail(self, user_id=None, audit_id=None): if user_id: - url = '/identity/token-revocation-events/users/%s' % user_id + url = "/identity/token-revocation-events/users/%s" % user_id elif audit_id: - url = '/identity/token-revocation-events/audits/%s' % audit_id + url = "/identity/token-revocation-events/audits/%s" % audit_id else: - raise exceptions.\ - IllegalArgumentException('Token revocation event user ID' - ' or audit ID required.') + raise exceptions.IllegalArgumentException( + "Token revocation event user ID or audit ID required." + ) return self._revoke_event_detail(url) def delete_revoke_event(self, user_id=None, audit_id=None): if user_id: - url = '/identity/token-revocation-events/users/%s' % user_id + url = "/identity/token-revocation-events/users/%s" % user_id elif audit_id: - url = '/identity/token-revocation-events/audits/%s' % audit_id + url = "/identity/token-revocation-events/audits/%s" % audit_id else: - raise exceptions.\ - IllegalArgumentException('Token revocation event ID' - ' or audit ID required.') + raise exceptions.IllegalArgumentException( + "Token revocation event ID or audit ID required." + ) return self._revoke_event_delete(url) diff --git a/distributedcloud/dcdbsync/version.py b/distributedcloud/dcdbsync/version.py index 2e025d998..2c53021e2 100644 --- a/distributedcloud/dcdbsync/version.py +++ b/distributedcloud/dcdbsync/version.py @@ -10,11 +10,11 @@ # License for the specific language governing permissions and limitations # under the License. # -# Copyright (c) 2019 Wind River Systems, Inc. +# Copyright (c) 2019, 2024 Wind River Systems, Inc. # # SPDX-License-Identifier: Apache-2.0 # import pbr.version -version_info = pbr.version.VersionInfo('distributedcloud') +version_info = pbr.version.VersionInfo("distributedcloud") diff --git a/distributedcloud/run_black.py b/distributedcloud/run_black.py index 630b49706..565aa5fb9 100644 --- a/distributedcloud/run_black.py +++ b/distributedcloud/run_black.py @@ -6,7 +6,6 @@ import sys # List of module directories to check modules = [ "dccommon", - "dcdbsync/api", "dcdbsync", "dcorch/api", "dcorch/common", @@ -23,7 +22,7 @@ modules = [ ] # List of modules that are already formatted with black -formatted_modules = ["dccommon", "dcdbsync/api"] +formatted_modules = ["dccommon", "dcdbsync"] # Function to run black check
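# A minimal sketch of what such a per-module check can look like (an
# illustrative assumption, not the repository's actual implementation; the
# helper name check_module_format is hypothetical):

import subprocess


def check_module_format(module_path):
    # "black --check" exits non-zero when any file under module_path would be
    # reformatted, so the return value tells the caller whether the module is
    # already black-clean.
    result = subprocess.run(["black", "--check", module_path], check=False)
    return result.returncode == 0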