Enable pyupgrade

pyupgrade is a tool (and pre-commit hook) that automatically upgrades syntax for newer versions of the language. It helps get rid of syntax that was only required for Python versions we no longer support and prepares us for easier adoption of newer Python versions. The tool is already used in several other OpenStack projects, so it is time to start using it for Keystone as well. This change was generated by uncommenting the pre-commit hook and running `pre-commit run -a` to convert the code. The same result could also be achieved by simply trying to commit and adding the converted files over a few iterations.

Change-Id: Ia1f64709e57ebb4e44db128bfea4c5957b2071df
This commit is contained in: parent 55e8c1e605, commit aaf0cc8fae
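For illustration, these are the kinds of rewrites pyupgrade applies under `--py38-plus`. The snippet below is a generic sketch with made-up names, not code taken from Keystone; the actual conversions are in the diff that follows.

# Before: Python 2 era idioms
class Example(object):
    def __init__(self, name):
        super(Example, self).__init__()
        self.label = 'name=%s' % name

# After running pyupgrade --py38-plus
class Example:
    def __init__(self, name):
        super().__init__()
        self.label = f'name={name}'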
@@ -22,11 +22,11 @@ repos:
 # rev: v1.1.1
 # hooks:
 # - id: doc8
-# - repo: https://github.com/asottile/pyupgrade
-# rev: v3.15.2
-# hooks:
-# - id: pyupgrade
-# args: ['--py38-plus']
+- repo: https://github.com/asottile/pyupgrade
+rev: v3.15.2
+hooks:
+- id: pyupgrade
+args: ['--py38-plus']
 - repo: https://github.com/psf/black
 rev: 24.4.0
 hooks:
@@ -61,7 +61,7 @@ source_suffix = '.rst'
 master_doc = 'index'
 # General information about the project.
-copyright = u'2010-present, OpenStack Foundation'
+copyright = '2010-present, OpenStack Foundation'
 # -- Options for openstackdocstheme -------------------------------------------
 openstackdocs_repo_name = 'openstack/keystone'
@@ -7,7 +7,7 @@ KEYCLOAK_URL = os.environ.get('KEYCLOAK_URL')
 HOST_IP = os.environ.get('HOST_IP', 'localhost')
-class KeycloakClient(object):
+class KeycloakClient:
 def __init__(self):
 self.session = requests.session()
@@ -95,7 +95,7 @@ def _get_sso_origin_host():
 ]
 if host not in trusted_dashboards:
-msg = '%(host)s is not a trusted dashboard host' % {'host': host}
+msg = f'{host} is not a trusted dashboard host'
 tr_msg = _('%(host)s is not a trusted dashboard host') % {'host': host}
 LOG.error(msg)
 raise exception.Unauthorized(tr_msg)
@@ -102,7 +102,7 @@ class CredentialResource(ks_flask.ResourceBase):
 ref['blob'] = jsonutils.dumps(blob)
 return ref
 else:
-return super(CredentialResource, self)._assign_unique_id(ref)
+return super()._assign_unique_id(ref)
 def _list_credentials(self):
 filters = ['user_id', 'type']
@@ -44,7 +44,7 @@ def _get_versions_list(identity_url):
 return versions
-class MimeTypes(object):
+class MimeTypes:
 JSON = 'application/json'
 JSON_HOME = 'application/json-home'
@@ -107,7 +107,7 @@ def get_version_v3():
 )
-class DiscoveryAPI(object):
+class DiscoveryAPI:
 # NOTE(morgan): The Discovery Bits are so special they cannot conform to
 # Flask-RESTful-isms. We are using straight flask Blueprint(s) here so that
 # we have a lot more control over what the heck is going on. This is just
@@ -438,9 +438,9 @@ class SAML2MetadataResource(flask_restful.Resource):
 """
 metadata_path = CONF.saml.idp_metadata_path
 try:
-with open(metadata_path, 'r') as metadata_handler:
+with open(metadata_path) as metadata_handler:
 metadata = metadata_handler.read()
-except IOError as e:
+except OSError as e:
 # Raise HTTP 500 in case Metadata file cannot be read.
 raise exception.MetadataFileError(reason=e)
 resp = flask.make_response(metadata, http.client.OK)
@@ -190,10 +190,10 @@ class RequestTokenResource(_OAuth1ResourceBase):
 initiator=notifications.build_audit_initiator(),
 )
-result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % {
-'key': token_ref['id'],
-'secret': token_ref['request_secret'],
-}
+result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
+key=token_ref['id'],
+secret=token_ref['request_secret'],
+)
 if CONF.oauth1.request_token_duration > 0:
 expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at']
@@ -293,10 +293,10 @@ class AccessTokenResource(_OAuth1ResourceBase):
 initiator=notifications.build_audit_initiator(),
 )
-result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % {
-'key': token_ref['id'],
-'secret': token_ref['access_secret'],
-}
+result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
+key=token_ref['id'],
+secret=token_ref['access_secret'],
+)
 if CONF.oauth1.access_token_duration > 0:
 expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at'])
@@ -397,10 +397,10 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
 prior_role_link = ''
 if 'role_id' in entity.get('indirect', {}):
 formatted_link += '/roles/%s' % entity['indirect']['role_id']
-prior_role_link = '/prior_role/%(prior)s/implies/%(implied)s' % {
-'prior': entity['role_id'],
-'implied': entity['indirect']['role_id'],
-}
+prior_role_link = '/prior_role/{prior}/implies/{implied}'.format(
+prior=entity['role_id'],
+implied=entity['indirect']['role_id'],
+)
 else:
 formatted_link += '/roles/%s' % entity['role_id']
@@ -488,9 +488,9 @@ class _OAuth1ResourceBase(ks_flask.ResourceBase):
 # method. This was chosen as it more closely mirrors the pre-flask
 # code (for transition).
 ref.setdefault('links', {})
-path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % {
-'user_id': ref.get('authorizing_user_id', '')
-}
+path = '/users/{user_id}/OS-OAUTH1/access_tokens'.format(
+user_id=ref.get('authorizing_user_id', '')
+)
 ref['links']['self'] = ks_flask.base_url(path) + '/' + ref['id']
@@ -17,7 +17,7 @@ import abc
 from keystone import exception
-class ApplicationCredentialDriverBase(object, metaclass=abc.ABCMeta):
+class ApplicationCredentialDriverBase(metaclass=abc.ABCMeta):
 @abc.abstractmethod
 def authenticate(self, application_credential_id, secret):
@@ -43,7 +43,7 @@ class Manager(manager.Manager):
 _ACCESS_RULE = 'access_rule'
 def __init__(self):
-super(Manager, self).__init__(CONF.application_credential.driver)
+super().__init__(CONF.application_credential.driver)
 self._register_callback_listeners()
 def _register_callback_listeners(self):
@@ -83,7 +83,7 @@ class Manager(manager.Manager):
 assignment_list = self.assignment_api.list_role_assignments(
 user_id=user_id, project_id=project_id, effective=True
 )
-return list(set([x['role_id'] for x in assignment_list]))
+return list({x['role_id'] for x in assignment_list})
 def _require_user_has_role_in_project(self, roles, user_id, project_id):
 user_roles = self._get_user_roles(user_id, project_id)
@@ -21,7 +21,7 @@ from keystone import exception
 CONF = keystone.conf.CONF
-class AssignmentDriverBase(object, metaclass=abc.ABCMeta):
+class AssignmentDriverBase(metaclass=abc.ABCMeta):
 def _get_list_limit(self):
 return CONF.assignment.list_limit or CONF.list_limit
@@ -18,7 +18,7 @@ from keystone import exception
 from keystone.i18n import _
-class AssignmentType(object):
+class AssignmentType:
 USER_PROJECT = 'UserProject'
 GROUP_PROJECT = 'GroupProject'
 USER_DOMAIN = 'UserDomain'
@@ -182,7 +182,7 @@ class Assignment(base.AssignmentDriverBase):
 )
 )
 except sql.DBDuplicateEntry:
-msg = 'User %s already has role %s in tenant %s' % (
+msg = 'User {} already has role {} in tenant {}'.format(
 user_id,
 role_id,
 project_id,
@@ -68,7 +68,7 @@ class Manager(manager.Manager):
 def __init__(self):
 assignment_driver = CONF.assignment.driver
-super(Manager, self).__init__(assignment_driver)
+super().__init__(assignment_driver)
 self.event_callbacks = {
 notifications.ACTIONS.deleted: {
@@ -96,7 +96,7 @@ class Manager(manager.Manager):
 project_id=project_id, effective=True
 )
 # Use set() to process the list to remove any duplicates
-return list(set([x['user_id'] for x in assignment_list]))
+return list({x['user_id'] for x in assignment_list})
 def _send_app_cred_notification_for_role_removal(self, role_id):
 """Delete all application credential for a specific role.
@@ -133,7 +133,7 @@ class Manager(manager.Manager):
 user_id=user_id, project_id=project_id, effective=True
 )
 # Use set() to process the list to remove any duplicates
-return list(set([x['role_id'] for x in assignment_list]))
+return list({x['role_id'] for x in assignment_list})
 @MEMOIZE_COMPUTED_ASSIGNMENTS
 def get_roles_for_trustor_and_project(self, trustor_id, project_id):
@@ -156,7 +156,7 @@ class Manager(manager.Manager):
 strip_domain_roles=False,
 )
 # Use set() to process the list to remove any duplicates
-return list(set([x['role_id'] for x in assignment_list]))
+return list({x['role_id'] for x in assignment_list})
 @MEMOIZE_COMPUTED_ASSIGNMENTS
 def get_roles_for_user_and_domain(self, user_id, domain_id):
@@ -171,7 +171,7 @@ class Manager(manager.Manager):
 user_id=user_id, domain_id=domain_id, effective=True
 )
 # Use set() to process the list to remove any duplicates
-return list(set([x['role_id'] for x in assignment_list]))
+return list({x['role_id'] for x in assignment_list})
 def get_roles_for_groups(self, group_ids, project_id=None, domain_id=None):
 """Get a list of roles for this group on domain and/or project."""
@@ -196,7 +196,7 @@ class Manager(manager.Manager):
 else:
 raise AttributeError(_("Must specify either domain or project"))
-role_ids = list(set([x['role_id'] for x in assignment_list]))
+role_ids = list({x['role_id'] for x in assignment_list})
 return PROVIDERS.role_api.list_roles_from_ids(role_ids)
 @notifications.role_assignment('created')
@@ -241,13 +241,7 @@ class Manager(manager.Manager):
 )
 # Use set() to process the list to remove any duplicates
 project_ids = list(
-set(
-[
-x['project_id']
-for x in assignment_list
-if x.get('project_id')
-]
-)
+{x['project_id'] for x in assignment_list if x.get('project_id')}
 )
 return PROVIDERS.resource_api.list_projects_from_ids(project_ids)
@@ -260,9 +254,7 @@ class Manager(manager.Manager):
 )
 # Use set() to process the list to remove any duplicates
 domain_ids = list(
-set(
-[x['domain_id'] for x in assignment_list if x.get('domain_id')]
-)
+{x['domain_id'] for x in assignment_list if x.get('domain_id')}
 )
 return PROVIDERS.resource_api.list_domains_from_ids(domain_ids)
@@ -271,9 +263,7 @@ class Manager(manager.Manager):
 source_from_group_ids=group_ids, effective=True
 )
 domain_ids = list(
-set(
-[x['domain_id'] for x in assignment_list if x.get('domain_id')]
-)
+{x['domain_id'] for x in assignment_list if x.get('domain_id')}
 )
 return PROVIDERS.resource_api.list_domains_from_ids(domain_ids)
@@ -282,13 +272,7 @@ class Manager(manager.Manager):
 source_from_group_ids=group_ids, effective=True
 )
 project_ids = list(
-set(
-[
-x['project_id']
-for x in assignment_list
-if x.get('project_id')
-]
-)
+{x['project_id'] for x in assignment_list if x.get('project_id')}
 )
 return PROVIDERS.resource_api.list_projects_from_ids(project_ids)
@@ -1497,7 +1481,7 @@ class RoleManager(manager.Manager):
 )
 role_driver = assignment_manager_obj.default_role_driver()
-super(RoleManager, self).__init__(role_driver)
+super().__init__(role_driver)
 @MEMOIZE
 def get_role(self, role_id):
@@ -29,7 +29,7 @@ NULL_DOMAIN_ID = '<<null>>'
 CONF = keystone.conf.CONF
-class RoleDriverBase(object, metaclass=abc.ABCMeta):
+class RoleDriverBase(metaclass=abc.ABCMeta):
 def _get_list_limit(self):
 return CONF.role.list_limit or CONF.list_limit
@@ -21,9 +21,7 @@ from keystone.common import sql
 class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras):
 def to_dict(self, include_extra_dict=False):
-d = super(RoleTable, self).to_dict(
-include_extra_dict=include_extra_dict
-)
+d = super().to_dict(include_extra_dict=include_extra_dict)
 if d['domain_id'] == base.NULL_DOMAIN_ID:
 d['domain_id'] = None
 # NOTE(notmorgan): Eventually it may make sense to drop the empty
@@ -48,7 +46,7 @@ class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras):
 if opt_value is not None:
 opt.validator(opt_value)
 resource_options[opt.option_id] = opt_value
-role_obj = super(RoleTable, cls).from_dict(new_dict)
+role_obj = super().from_dict(new_dict)
 setattr(role_obj, '_resource_options', resource_options)
 return role_obj
@@ -103,7 +103,7 @@ class AuthContext(dict):
 'as it has conflicting values %(new)s and %(old)s'
 ) % ({'attribute': key, 'new': val, 'old': existing_val})
 raise exception.Unauthorized(msg)
-return super(AuthContext, self).__setitem__(key, val)
+return super().__setitem__(key, val)
 def update(self, E=None, **F):
 """Override update to prevent conflicting values."""
@@ -118,7 +118,7 @@ class AuthContext(dict):
 self[key] = val
-class AuthInfo(provider_api.ProviderAPIMixin, object):
+class AuthInfo(provider_api.ProviderAPIMixin):
 """Encapsulation of "auth" request."""
 @staticmethod
@@ -445,7 +445,7 @@ class AuthInfo(provider_api.ProviderAPIMixin, object):
 self._scope_data = (domain_id, project_id, trust, unscoped, system)
-class UserMFARulesValidator(provider_api.ProviderAPIMixin, object):
+class UserMFARulesValidator(provider_api.ProviderAPIMixin):
 """Helper object that can validate the MFA Rules."""
 @classmethod
@@ -24,9 +24,7 @@ AuthHandlerResponse = collections.namedtuple(
 )
-class AuthMethodHandler(
-provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta
-):
+class AuthMethodHandler(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
 """Abstract base class for an authentication plugin."""
 def __init__(self):
@@ -99,7 +99,7 @@ def convert_integer_to_method_list(method_int):
 return methods
-class BaseUserInfo(provider_api.ProviderAPIMixin, object):
+class BaseUserInfo(provider_api.ProviderAPIMixin):
 @classmethod
 def create(cls, auth_payload, method_name):
@@ -216,13 +216,11 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object):
 class UserAuthInfo(BaseUserInfo):
 def __init__(self):
-super(UserAuthInfo, self).__init__()
+super().__init__()
 self.password = None
 def _validate_and_normalize_auth_data(self, auth_payload):
-super(UserAuthInfo, self)._validate_and_normalize_auth_data(
-auth_payload
-)
+super()._validate_and_normalize_auth_data(auth_payload)
 user_info = auth_payload['user']
 self.password = user_info.get('password')
@@ -230,20 +228,18 @@ class UserAuthInfo(BaseUserInfo):
 class TOTPUserInfo(BaseUserInfo):
 def __init__(self):
-super(TOTPUserInfo, self).__init__()
+super().__init__()
 self.passcode = None
 def _validate_and_normalize_auth_data(self, auth_payload):
-super(TOTPUserInfo, self)._validate_and_normalize_auth_data(
-auth_payload
-)
+super()._validate_and_normalize_auth_data(auth_payload)
 user_info = auth_payload['user']
 self.passcode = user_info.get('passcode')
 class AppCredInfo(BaseUserInfo):
 def __init__(self):
-super(AppCredInfo, self).__init__()
+super().__init__()
 self.id = None
 self.secret = None
@@ -257,13 +253,9 @@ class AppCredInfo(BaseUserInfo):
 if not auth_payload.get('user'):
 auth_payload['user'] = {}
 auth_payload['user']['id'] = self.user_id
-super(AppCredInfo, self)._validate_and_normalize_auth_data(
-auth_payload
-)
+super()._validate_and_normalize_auth_data(auth_payload)
 elif auth_payload.get('name'):
-super(AppCredInfo, self)._validate_and_normalize_auth_data(
-auth_payload
-)
+super()._validate_and_normalize_auth_data(auth_payload)
 hints = driver_hints.Hints()
 hints.add_filter('name', auth_payload['name'])
 app_cred = app_cred_api.list_application_credentials(
@@ -94,4 +94,4 @@ class KerberosDomain(Domain):
 def _authenticate(self):
 if flask.request.environ.get('AUTH_TYPE') != 'Negotiate':
 raise exception.Unauthorized(_("auth_type is not Negotiate"))
-return super(KerberosDomain, self)._authenticate()
+return super()._authenticate()
@@ -22,9 +22,7 @@ from keystone import exception
 CONF = keystone.conf.CONF
-class CatalogDriverBase(
-provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta
-):
+class CatalogDriverBase(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
 """Interface description for the Catalog driver."""
 def _get_list_limit(self):
@@ -100,7 +100,7 @@ class Endpoint(sql.ModelBase, sql.ModelDictMixinWithExtras):
 new_dict = endpoint_dict.copy()
 if new_dict.get('enabled') is None:
 new_dict['enabled'] = True
-return super(Endpoint, cls).from_dict(new_dict)
+return super().from_dict(new_dict)
 class Catalog(base.CatalogDriverBase):
@@ -82,7 +82,7 @@ class Catalog(base.CatalogDriverBase):
 """
 def __init__(self, templates=None):
-super(Catalog, self).__init__()
+super().__init__()
 LOG.warning(
 'The templated catalog driver has been deprecated and '
 'will be removed in a future release.'
@@ -99,7 +99,7 @@ class Catalog(base.CatalogDriverBase):
 try:
 with open(template_file) as f:
 self.templates = parse_templates(f)
-except IOError:
+except OSError:
 LOG.critical('Unable to open template file %s', template_file)
 raise
@@ -167,7 +167,7 @@ class Catalog(base.CatalogDriverBase):
 for key in service_ref:
 if key.endswith('URL'):
 interface = key[:-3]
-endpoint_id = '%s-%s-%s' % (
+endpoint_id = '{}-{}-{}'.format(
 region_id,
 service_type,
 interface,
@@ -58,7 +58,7 @@ class Manager(manager.Manager):
 _REGION = 'region'
 def __init__(self):
-super(Manager, self).__init__(CONF.catalog.driver)
+super().__init__(CONF.catalog.driver)
 notifications.register_event_callback(
 notifications.ACTIONS.deleted,
 'project',
@@ -25,7 +25,7 @@ LOG = log.getLogger(__name__)
 PROVIDERS = provider_api.ProviderAPIs
-class Bootstrapper(object):
+class Bootstrapper:
 def __init__(self):
 backends.load_backends()
@@ -50,7 +50,7 @@ CONF = keystone.conf.CONF
 LOG = log.getLogger(__name__)
-class BaseApp(object):
+class BaseApp:
 name = None
@@ -71,7 +71,7 @@ class BootStrap(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(BootStrap, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--bootstrap-username',
 default='admin',
@@ -272,7 +272,7 @@ class ProjectSetup(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(ProjectSetup, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--project-name',
 default=None,
@@ -308,7 +308,7 @@ class UserSetup(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(UserSetup, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--username',
 default=None,
@@ -349,7 +349,7 @@ class Doctor(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(Doctor, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 return parser
 @staticmethod
@@ -365,7 +365,7 @@ class DbSync(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(DbSync, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 'version',
 default=None,
@@ -507,9 +507,7 @@ class BasePermissionsSetup(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(BasePermissionsSetup, cls).add_argument_parser(
-subparsers
-)
+parser = super().add_argument_parser(subparsers)
 running_as_root = os.geteuid() == 0
 parser.add_argument('--keystone-user', required=running_as_root)
 parser.add_argument('--keystone-group', required=running_as_root)
@@ -651,7 +649,7 @@ class CreateJWSKeyPair(BasePermissionsSetup):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(CreateJWSKeyPair, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--force',
@@ -934,7 +932,7 @@ class TrustFlush(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(TrustFlush, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--project-id',
@@ -1013,7 +1011,7 @@ class MappingPurge(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(MappingPurge, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--all',
 default=False,
@@ -1139,10 +1137,10 @@ def _domain_config_finder(conf_dir):
 )
-class DomainConfigUploadFiles(object):
+class DomainConfigUploadFiles:
 def __init__(self, domain_config_finder=_domain_config_finder):
-super(DomainConfigUploadFiles, self).__init__()
+super().__init__()
 self.load_backends()
 self._domain_config_finder = domain_config_finder
@@ -1324,7 +1322,7 @@ class DomainConfigUpload(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--all',
 default=False,
@@ -1369,7 +1367,7 @@ class MappingEngineTester(BaseApp):
 name = 'mapping_engine'
 def __init__(self):
-super(MappingEngineTester, self).__init__()
+super().__init__()
 self.mapping_id = uuid.uuid4().hex
 self.rules_pathname = None
 self.rules = None
@@ -1392,7 +1390,7 @@ class MappingEngineTester(BaseApp):
 try:
 with open(path) as file:
 self.assertion = file.read().strip()
-except IOError as e:
+except OSError as e:
 raise SystemExit(
 _("Error while opening file " "%(path)s: %(err)s")
 % {'path': path, 'err': e}
@@ -1496,9 +1494,7 @@ class MappingEngineTester(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(MappingEngineTester, cls).add_argument_parser(
-subparsers
-)
+parser = super().add_argument_parser(subparsers)
 parser.formatter_class = argparse.RawTextHelpFormatter
 parser.add_argument(
@@ -1579,7 +1575,7 @@ class MappingPopulate(BaseApp):
 @classmethod
 def add_argument_parser(cls, subparsers):
-parser = super(MappingPopulate, cls).add_argument_parser(subparsers)
+parser = super().add_argument_parser(subparsers)
 parser.add_argument(
 '--domain-name',
@@ -27,7 +27,7 @@ LOG = log.getLogger(__name__)
 PROVIDERS = provider_api.ProviderAPIs
-class Identity(object):
+class Identity:
 def __init__(self):
 backends.load_backends()
keystone/common/cache/core.py (vendored)
@@ -27,7 +27,7 @@ import keystone.conf
 CONF = keystone.conf.CONF
-class RegionInvalidationManager(object):
+class RegionInvalidationManager:
 REGION_KEY_PREFIX = '<<<region>>>:'
@@ -84,7 +84,7 @@ def key_mangler_factory(invalidation_manager, orig_key_mangler):
 # If it were there would be no way to get to it, making the cache
 # effectively useless.
 if not invalidation_manager.is_region_key(key):
-key = '%s:%s' % (key, invalidation_manager.region_id)
+key = f'{key}:{invalidation_manager.region_id}'
 if orig_key_mangler:
 key = orig_key_mangler(key)
 return key
@@ -38,7 +38,7 @@ class RequestContext(oslo_context.RequestContext):
 self.oauth_access_token_id = kwargs.pop('oauth_access_token_id', None)
 self.authenticated = kwargs.pop('authenticated', False)
-super(RequestContext, self).__init__(**kwargs)
+super().__init__(**kwargs)
 def to_policy_values(self):
 """Add keystone-specific policy values to policy representation.
@@ -62,7 +62,7 @@ class RequestContext(oslo_context.RequestContext):
 # needs reworking of how we handle the context in oslo.policy. Until
 # this is reworked, it is not possible to merge the token render
 # function into keystone.api
-values = super(RequestContext, self).to_policy_values()
+values = super().to_policy_values()
 values['token'] = self.token_reference['token']
 values['domain_id'] = self.domain_id if self.domain_id else None
 return values
@@ -63,7 +63,7 @@ def truncated(f):
 return wrapper
-class Hints(object):
+class Hints:
 """Encapsulate driver hints for listing entities.
 Hints are modifiers that affect the return of entities from a
@@ -34,7 +34,7 @@ CONF = keystone.conf.CONF
 NULL_KEY = base64.urlsafe_b64encode(b'\x00' * 32)
-class FernetUtils(object):
+class FernetUtils:
 def __init__(self, key_repository, max_active_keys, config_group):
 self.key_repository = key_repository
@@ -128,7 +128,7 @@ class FernetUtils(object):
 f.write(key.decode('utf-8'))
 f.flush()
 create_success = True
-except IOError:
+except OSError:
 LOG.error('Failed to create new temporary key: %s', key_file)
 raise
 finally:
@@ -163,7 +163,7 @@ class FernetUtils(object):
 for filename in os.listdir(key_repo):
 path = os.path.join(key_repo, str(filename))
 if os.path.isfile(path):
-with open(path, 'r') as key_file:
+with open(path) as key_file:
 try:
 key_id = int(filename)
 except ValueError: # nosec : name is not a number
@@ -50,7 +50,7 @@ def build_v3_extension_parameter_relation(
 )
-class Parameters(object):
+class Parameters:
 """Relationships for Common parameters."""
 DOMAIN_ID = build_v3_parameter_relation('domain_id')
@@ -71,7 +71,7 @@ class Parameters(object):
 ACCESS_RULE_ID = build_v3_parameter_relation('access_rule_id')
-class Status(object):
+class Status:
 """Status values supported."""
 DEPRECATED = 'deprecated'
@@ -95,7 +95,7 @@ class Status(object):
 )
-class JsonHomeResources(object):
+class JsonHomeResources:
 """JSON Home resource data."""
 __resources = {}
@@ -87,11 +87,11 @@ class _TraceMeta(type):
 @staticmethod
 def wrapper(__f, __classname):
 __argspec = inspect.getfullargspec(__f)
-__fn_info = '%(module)s.%(classname)s.%(funcname)s' % {
-'module': inspect.getmodule(__f).__name__,
-'classname': __classname,
-'funcname': __f.__name__,
-}
+__fn_info = '{module}.{classname}.{funcname}'.format(
+module=inspect.getmodule(__f).__name__,
+classname=__classname,
+funcname=__f.__name__,
+)
 # NOTE(morganfainberg): Omit "cls" and "self" when printing trace logs
 # the index can be calculated at wrap time rather than at runtime.
 if __argspec.args and __argspec.args[0] in ('self', 'cls'):
@@ -120,10 +120,7 @@ class _TraceMeta(type):
 [
 ', '.join([repr(a) for a in args[__arg_idx:]]),
 ', '.join(
-[
-'%(k)s=%(v)r' % {'k': k, 'v': v}
-for k, v in kwargs.items()
-]
+[f'{k}={v!r}' for k, v in kwargs.items()]
 ),
 ]
 ),
@@ -161,7 +158,7 @@ class _TraceMeta(type):
 return type.__new__(meta, classname, bases, final_cls_dict)
-class Manager(object, metaclass=_TraceMeta):
+class Manager(metaclass=_TraceMeta):
 """Base class for intermediary request layer.
 The Manager layer exists to support additional logic that applies to all
@@ -11,7 +11,7 @@
 # under the License.
-class ProviderAPIRegistry(object):
+class ProviderAPIRegistry:
 __shared_object_state = {}
 __registry = {}
 __iter__ = __registry.__iter__
@@ -28,9 +28,7 @@ class ProviderAPIRegistry(object):
 #
 # Use "super" to bypass the __setattr__ preventing changes to the
 # object itself.
-super(ProviderAPIRegistry, self).__setattr__(
-'__dict__', self.__shared_object_state
-)
+super().__setattr__('__dict__', self.__shared_object_state)
 def __getattr__(self, item):
 """Do attr lookup."""
@@ -70,11 +68,11 @@ class ProviderAPIRegistry(object):
 """ONLY USED FOR TESTING."""
 self.__registry.clear()
 # Use super to allow setting around class implementation of __setattr__
-super(ProviderAPIRegistry, self).__setattr__('locked', False)
+super().__setattr__('locked', False)
 def lock_provider_registry(self):
 # Use super to allow setting around class implementation of __setattr__
-super(ProviderAPIRegistry, self).__setattr__('locked', True)
+super().__setattr__('locked', True)
 def deferred_provider_lookup(self, api, method):
 """Create descriptor that performs lookup of api and method on demand.
@@ -90,7 +88,7 @@ class ProviderAPIRegistry(object):
 :type method: str
 """
-class DeferredProviderLookup(object):
+class DeferredProviderLookup:
 def __init__(self, api, method):
 self.__api = api
 self.__method = method
@@ -106,7 +104,7 @@ class DuplicateProviderError(Exception):
 """Attempting to register a duplicate API provider."""
-class ProviderAPIMixin(object):
+class ProviderAPIMixin:
 """Allow referencing provider apis on self via __getattr__.
 Be sure this class is first in the class definition for inheritance.
@@ -50,7 +50,7 @@ DEFAULT_POLICY_FILE = 'policy.yaml'
 opts.set_defaults(CONF, DEFAULT_POLICY_FILE)
-class RBACEnforcer(object):
+class RBACEnforcer:
 """Enforce RBAC on API calls."""
 __shared_state__ = {}
@@ -184,7 +184,7 @@ class RBACEnforcer(object):
 if LOG.logger.getEffectiveLevel() <= log.DEBUG:
 LOG.debug(
 'RBAC: Adding query filter params (%s)',
-', '.join(['%s=%s' % (k, v) for k, v in target.items()]),
+', '.join([f'{k}={v}' for k, v in target.items()]),
 )
 return target
@@ -496,7 +496,7 @@ class RBACEnforcer(object):
 # LOG the Args
 args_str = ', '.join(
 [
-'%s=%s' % (k, v)
+f'{k}={v}'
 for k, v in (flask.request.view_args or {}).items()
 ]
 )
@@ -105,14 +105,14 @@ def resource_options_ref_to_mapper(ref, option_class):
 ref._resource_option_mapper[r_opt_id] = opt_obj
-class ResourceOptionRegistry(object):
+class ResourceOptionRegistry:
 def __init__(self, registry_name):
 self._registered_options = {}
 self._registry_type = registry_name
 @property
 def option_names(self):
-return set([opt.option_name for opt in self.options])
+return {opt.option_name for opt in self.options}
 @property
 def options_by_name(self):
@@ -182,7 +182,7 @@ class ResourceOptionRegistry(object):
 self._registered_options[option.option_id] = option
-class ResourceOption(object):
+class ResourceOption:
 def __init__(
 self,
@@ -30,7 +30,7 @@ CONF = keystone.conf.CONF
 LOG = log.getLogger(__name__)
-class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object):
+class TokenlessAuthHelper(provider_api.ProviderAPIMixin):
 def __init__(self, env):
 """A init class for TokenlessAuthHelper.
@@ -112,7 +112,7 @@ class SmarterEncoder(jsonutils.json.JSONEncoder):
 def default(self, obj):
 if not isinstance(obj, dict) and hasattr(obj, 'items'):
 return dict(obj.items())
-return super(SmarterEncoder, self).default(obj)
+return super().default(obj)
 def hash_access_key(access):
@@ -304,7 +304,7 @@ def get_unix_group(group=None):
 return group_info.gr_gid, group_info.gr_name
-class WhiteListedItemFilter(object):
+class WhiteListedItemFilter:
 def __init__(self, whitelist, data):
 self._whitelist = set(whitelist or [])
@@ -52,7 +52,7 @@ def validate_password(password):
 raise exception.PasswordValidationError(detail=detail)
-class SchemaValidator(object):
+class SchemaValidator:
 """Resource reference validator class."""
 validator_org = jsonschema.Draft4Validator
@@ -22,7 +22,7 @@ from keystone import exception
 LOG = log.getLogger(__name__)
-class CredentialDriverBase(object, metaclass=abc.ABCMeta):
+class CredentialDriverBase(metaclass=abc.ABCMeta):
 # credential crud
 @abc.abstractmethod
@@ -44,7 +44,7 @@ class Manager(manager.Manager):
 _CRED = 'credential'
 def __init__(self):
-super(Manager, self).__init__(CONF.credential.driver)
+super().__init__(CONF.credential.driver)
 def _decrypt_credential(self, credential):
 """Return a decrypted credential reference."""
@@ -23,4 +23,4 @@ class Manager(manager.Manager):
 _provides_api = 'credential_provider_api'
 def __init__(self):
-super(Manager, self).__init__(CONF.credential.provider)
+super().__init__(CONF.credential.provider)