Enable pyupgrade
pyupgrade is a tool (and pre-commit hook) that automatically upgrades syntax for newer versions of the language. It helps us get rid of syntax that was only needed for Python versions we no longer support, and it prepares us for easier adoption of newer Python versions. The tool is already used in some other OpenStack projects, so it is time to start using it for Keystone as well.

The change was generated by uncommenting the pre-commit hook and executing `pre-commit run -a` to convert the code. The same result could also be achieved by simply trying to commit and adding the converted files over a few iterations.

Change-Id: Ia1f64709e57ebb4e44db128bfea4c5957b2071df
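For reference, the kinds of rewrites pyupgrade applies with --py38-plus look roughly like the sketch below. This is an illustrative before/after example rather than code from the Keystone tree; the class, file, and variable names in it are made up.

    # Before: Python 2 era idioms that pyupgrade rewrites
    class Example(object):
        def __init__(self):
            super(Example, self).__init__()
            self.label = u'example'

        def read_config(self, key, value):
            try:
                # explicit 'r' mode is redundant and gets dropped
                with open('example.conf', 'r') as f:
                    words = f.read().split()
            except IOError:  # IOError is an alias of OSError
                words = []
            unique = list(set([w for w in words]))
            return '%(key)s=%(value)s' % {'key': key, 'value': value}, unique

    # After: what pyupgrade produces for the same code
    class Example:
        def __init__(self):
            super().__init__()
            self.label = 'example'

        def read_config(self, key, value):
            try:
                with open('example.conf') as f:
                    words = f.read().split()
            except OSError:
                words = []
            unique = list({w for w in words})
            return f'{key}={value}', unique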
parent 55e8c1e605
commit aaf0cc8fae
@@ -22,11 +22,11 @@ repos:
   #   rev: v1.1.1
   #   hooks:
   #     - id: doc8
-  # - repo: https://github.com/asottile/pyupgrade
-  #   rev: v3.15.2
-  #   hooks:
-  #     - id: pyupgrade
-  #       args: ['--py38-plus']
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.15.2
+    hooks:
+      - id: pyupgrade
+        args: ['--py38-plus']
   - repo: https://github.com/psf/black
     rev: 24.4.0
     hooks:
@@ -61,7 +61,7 @@ source_suffix = '.rst'
 master_doc = 'index'

 # General information about the project.
-copyright = u'2010-present, OpenStack Foundation'
+copyright = '2010-present, OpenStack Foundation'

 # -- Options for openstackdocstheme -------------------------------------------
 openstackdocs_repo_name = 'openstack/keystone'
@@ -7,7 +7,7 @@ KEYCLOAK_URL = os.environ.get('KEYCLOAK_URL')
 HOST_IP = os.environ.get('HOST_IP', 'localhost')


-class KeycloakClient(object):
+class KeycloakClient:
     def __init__(self):
         self.session = requests.session()

@@ -95,7 +95,7 @@ def _get_sso_origin_host():
     ]

     if host not in trusted_dashboards:
-        msg = '%(host)s is not a trusted dashboard host' % {'host': host}
+        msg = f'{host} is not a trusted dashboard host'
         tr_msg = _('%(host)s is not a trusted dashboard host') % {'host': host}
         LOG.error(msg)
         raise exception.Unauthorized(tr_msg)
@@ -102,7 +102,7 @@ class CredentialResource(ks_flask.ResourceBase):
             ref['blob'] = jsonutils.dumps(blob)
             return ref
         else:
-            return super(CredentialResource, self)._assign_unique_id(ref)
+            return super()._assign_unique_id(ref)

     def _list_credentials(self):
         filters = ['user_id', 'type']
@@ -44,7 +44,7 @@ def _get_versions_list(identity_url):
     return versions


-class MimeTypes(object):
+class MimeTypes:
     JSON = 'application/json'
     JSON_HOME = 'application/json-home'

@@ -107,7 +107,7 @@ def get_version_v3():
     )


-class DiscoveryAPI(object):
+class DiscoveryAPI:
     # NOTE(morgan): The Discovery Bits are so special they cannot conform to
     # Flask-RESTful-isms. We are using straight flask Blueprint(s) here so that
     # we have a lot more control over what the heck is going on. This is just
@@ -438,9 +438,9 @@ class SAML2MetadataResource(flask_restful.Resource):
         """
         metadata_path = CONF.saml.idp_metadata_path
         try:
-            with open(metadata_path, 'r') as metadata_handler:
+            with open(metadata_path) as metadata_handler:
                 metadata = metadata_handler.read()
-        except IOError as e:
+        except OSError as e:
             # Raise HTTP 500 in case Metadata file cannot be read.
             raise exception.MetadataFileError(reason=e)
         resp = flask.make_response(metadata, http.client.OK)
@@ -190,10 +190,10 @@ class RequestTokenResource(_OAuth1ResourceBase):
             initiator=notifications.build_audit_initiator(),
         )

-        result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % {
-            'key': token_ref['id'],
-            'secret': token_ref['request_secret'],
-        }
+        result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
+            key=token_ref['id'],
+            secret=token_ref['request_secret'],
+        )

         if CONF.oauth1.request_token_duration > 0:
             expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at']
@@ -293,10 +293,10 @@ class AccessTokenResource(_OAuth1ResourceBase):
             initiator=notifications.build_audit_initiator(),
         )

-        result = 'oauth_token=%(key)s&oauth_token_secret=%(secret)s' % {
-            'key': token_ref['id'],
-            'secret': token_ref['access_secret'],
-        }
+        result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
+            key=token_ref['id'],
+            secret=token_ref['access_secret'],
+        )

         if CONF.oauth1.access_token_duration > 0:
             expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at'])
@@ -397,10 +397,10 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
             prior_role_link = ''
             if 'role_id' in entity.get('indirect', {}):
                 formatted_link += '/roles/%s' % entity['indirect']['role_id']
-                prior_role_link = '/prior_role/%(prior)s/implies/%(implied)s' % {
-                    'prior': entity['role_id'],
-                    'implied': entity['indirect']['role_id'],
-                }
+                prior_role_link = '/prior_role/{prior}/implies/{implied}'.format(
+                    prior=entity['role_id'],
+                    implied=entity['indirect']['role_id'],
+                )
             else:
                 formatted_link += '/roles/%s' % entity['role_id']

@@ -488,9 +488,9 @@ class _OAuth1ResourceBase(ks_flask.ResourceBase):
         # method. This was chosen as it more closely mirrors the pre-flask
         # code (for transition).
         ref.setdefault('links', {})
-        path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % {
-            'user_id': ref.get('authorizing_user_id', '')
-        }
+        path = '/users/{user_id}/OS-OAUTH1/access_tokens'.format(
+            user_id=ref.get('authorizing_user_id', '')
+        )
         ref['links']['self'] = ks_flask.base_url(path) + '/' + ref['id']


@@ -17,7 +17,7 @@ import abc
 from keystone import exception


-class ApplicationCredentialDriverBase(object, metaclass=abc.ABCMeta):
+class ApplicationCredentialDriverBase(metaclass=abc.ABCMeta):

     @abc.abstractmethod
     def authenticate(self, application_credential_id, secret):
@@ -43,7 +43,7 @@ class Manager(manager.Manager):
     _ACCESS_RULE = 'access_rule'

     def __init__(self):
-        super(Manager, self).__init__(CONF.application_credential.driver)
+        super().__init__(CONF.application_credential.driver)
         self._register_callback_listeners()

     def _register_callback_listeners(self):
@@ -83,7 +83,7 @@ class Manager(manager.Manager):
         assignment_list = self.assignment_api.list_role_assignments(
             user_id=user_id, project_id=project_id, effective=True
         )
-        return list(set([x['role_id'] for x in assignment_list]))
+        return list({x['role_id'] for x in assignment_list})

     def _require_user_has_role_in_project(self, roles, user_id, project_id):
         user_roles = self._get_user_roles(user_id, project_id)
@@ -21,7 +21,7 @@ from keystone import exception
 CONF = keystone.conf.CONF


-class AssignmentDriverBase(object, metaclass=abc.ABCMeta):
+class AssignmentDriverBase(metaclass=abc.ABCMeta):

     def _get_list_limit(self):
         return CONF.assignment.list_limit or CONF.list_limit
@@ -18,7 +18,7 @@ from keystone import exception
 from keystone.i18n import _


-class AssignmentType(object):
+class AssignmentType:
     USER_PROJECT = 'UserProject'
     GROUP_PROJECT = 'GroupProject'
     USER_DOMAIN = 'UserDomain'
@@ -182,7 +182,7 @@ class Assignment(base.AssignmentDriverBase):
                     )
                 )
         except sql.DBDuplicateEntry:
-            msg = 'User %s already has role %s in tenant %s' % (
+            msg = 'User {} already has role {} in tenant {}'.format(
                 user_id,
                 role_id,
                 project_id,
@@ -68,7 +68,7 @@ class Manager(manager.Manager):

     def __init__(self):
         assignment_driver = CONF.assignment.driver
-        super(Manager, self).__init__(assignment_driver)
+        super().__init__(assignment_driver)

         self.event_callbacks = {
             notifications.ACTIONS.deleted: {
@@ -96,7 +96,7 @@ class Manager(manager.Manager):
             project_id=project_id, effective=True
         )
         # Use set() to process the list to remove any duplicates
-        return list(set([x['user_id'] for x in assignment_list]))
+        return list({x['user_id'] for x in assignment_list})

     def _send_app_cred_notification_for_role_removal(self, role_id):
         """Delete all application credential for a specific role.
@@ -133,7 +133,7 @@ class Manager(manager.Manager):
             user_id=user_id, project_id=project_id, effective=True
         )
         # Use set() to process the list to remove any duplicates
-        return list(set([x['role_id'] for x in assignment_list]))
+        return list({x['role_id'] for x in assignment_list})

     @MEMOIZE_COMPUTED_ASSIGNMENTS
     def get_roles_for_trustor_and_project(self, trustor_id, project_id):
@@ -156,7 +156,7 @@ class Manager(manager.Manager):
             strip_domain_roles=False,
         )
         # Use set() to process the list to remove any duplicates
-        return list(set([x['role_id'] for x in assignment_list]))
+        return list({x['role_id'] for x in assignment_list})

     @MEMOIZE_COMPUTED_ASSIGNMENTS
     def get_roles_for_user_and_domain(self, user_id, domain_id):
@@ -171,7 +171,7 @@ class Manager(manager.Manager):
             user_id=user_id, domain_id=domain_id, effective=True
         )
         # Use set() to process the list to remove any duplicates
-        return list(set([x['role_id'] for x in assignment_list]))
+        return list({x['role_id'] for x in assignment_list})

     def get_roles_for_groups(self, group_ids, project_id=None, domain_id=None):
         """Get a list of roles for this group on domain and/or project."""
@@ -196,7 +196,7 @@ class Manager(manager.Manager):
         else:
             raise AttributeError(_("Must specify either domain or project"))

-        role_ids = list(set([x['role_id'] for x in assignment_list]))
+        role_ids = list({x['role_id'] for x in assignment_list})
         return PROVIDERS.role_api.list_roles_from_ids(role_ids)

     @notifications.role_assignment('created')
@@ -241,13 +241,7 @@ class Manager(manager.Manager):
         )
         # Use set() to process the list to remove any duplicates
         project_ids = list(
-            set(
-                [
-                    x['project_id']
-                    for x in assignment_list
-                    if x.get('project_id')
-                ]
-            )
+            {x['project_id'] for x in assignment_list if x.get('project_id')}
         )
         return PROVIDERS.resource_api.list_projects_from_ids(project_ids)

@@ -260,9 +254,7 @@ class Manager(manager.Manager):
         )
         # Use set() to process the list to remove any duplicates
         domain_ids = list(
-            set(
-                [x['domain_id'] for x in assignment_list if x.get('domain_id')]
-            )
+            {x['domain_id'] for x in assignment_list if x.get('domain_id')}
         )
         return PROVIDERS.resource_api.list_domains_from_ids(domain_ids)

@@ -271,9 +263,7 @@ class Manager(manager.Manager):
             source_from_group_ids=group_ids, effective=True
         )
         domain_ids = list(
-            set(
-                [x['domain_id'] for x in assignment_list if x.get('domain_id')]
-            )
+            {x['domain_id'] for x in assignment_list if x.get('domain_id')}
         )
         return PROVIDERS.resource_api.list_domains_from_ids(domain_ids)

@@ -282,13 +272,7 @@ class Manager(manager.Manager):
             source_from_group_ids=group_ids, effective=True
         )
         project_ids = list(
-            set(
-                [
-                    x['project_id']
-                    for x in assignment_list
-                    if x.get('project_id')
-                ]
-            )
+            {x['project_id'] for x in assignment_list if x.get('project_id')}
         )
         return PROVIDERS.resource_api.list_projects_from_ids(project_ids)

@@ -1497,7 +1481,7 @@ class RoleManager(manager.Manager):
         )
         role_driver = assignment_manager_obj.default_role_driver()

-        super(RoleManager, self).__init__(role_driver)
+        super().__init__(role_driver)

     @MEMOIZE
     def get_role(self, role_id):
@@ -29,7 +29,7 @@ NULL_DOMAIN_ID = '<<null>>'
 CONF = keystone.conf.CONF


-class RoleDriverBase(object, metaclass=abc.ABCMeta):
+class RoleDriverBase(metaclass=abc.ABCMeta):

     def _get_list_limit(self):
         return CONF.role.list_limit or CONF.list_limit
@@ -21,9 +21,7 @@ from keystone.common import sql
 class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras):

     def to_dict(self, include_extra_dict=False):
-        d = super(RoleTable, self).to_dict(
-            include_extra_dict=include_extra_dict
-        )
+        d = super().to_dict(include_extra_dict=include_extra_dict)
         if d['domain_id'] == base.NULL_DOMAIN_ID:
             d['domain_id'] = None
         # NOTE(notmorgan): Eventually it may make sense to drop the empty
@@ -48,7 +46,7 @@ class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras):
             if opt_value is not None:
                 opt.validator(opt_value)
                 resource_options[opt.option_id] = opt_value
-        role_obj = super(RoleTable, cls).from_dict(new_dict)
+        role_obj = super().from_dict(new_dict)
         setattr(role_obj, '_resource_options', resource_options)
         return role_obj

@@ -103,7 +103,7 @@ class AuthContext(dict):
                 'as it has conflicting values %(new)s and %(old)s'
             ) % ({'attribute': key, 'new': val, 'old': existing_val})
             raise exception.Unauthorized(msg)
-        return super(AuthContext, self).__setitem__(key, val)
+        return super().__setitem__(key, val)

     def update(self, E=None, **F):
         """Override update to prevent conflicting values."""
@@ -118,7 +118,7 @@ class AuthContext(dict):
             self[key] = val


-class AuthInfo(provider_api.ProviderAPIMixin, object):
+class AuthInfo(provider_api.ProviderAPIMixin):
     """Encapsulation of "auth" request."""

     @staticmethod
@@ -445,7 +445,7 @@ class AuthInfo(provider_api.ProviderAPIMixin, object):
         self._scope_data = (domain_id, project_id, trust, unscoped, system)


-class UserMFARulesValidator(provider_api.ProviderAPIMixin, object):
+class UserMFARulesValidator(provider_api.ProviderAPIMixin):
     """Helper object that can validate the MFA Rules."""

     @classmethod
@@ -24,9 +24,7 @@ AuthHandlerResponse = collections.namedtuple(
 )


-class AuthMethodHandler(
-    provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta
-):
+class AuthMethodHandler(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
     """Abstract base class for an authentication plugin."""

     def __init__(self):
@@ -99,7 +99,7 @@ def convert_integer_to_method_list(method_int):
     return methods


-class BaseUserInfo(provider_api.ProviderAPIMixin, object):
+class BaseUserInfo(provider_api.ProviderAPIMixin):

     @classmethod
     def create(cls, auth_payload, method_name):
@ -216,13 +216,11 @@ class BaseUserInfo(provider_api.ProviderAPIMixin, object):
|
||||
class UserAuthInfo(BaseUserInfo):
|
||||
|
||||
def __init__(self):
|
||||
super(UserAuthInfo, self).__init__()
|
||||
super().__init__()
|
||||
self.password = None
|
||||
|
||||
def _validate_and_normalize_auth_data(self, auth_payload):
|
||||
super(UserAuthInfo, self)._validate_and_normalize_auth_data(
|
||||
auth_payload
|
||||
)
|
||||
super()._validate_and_normalize_auth_data(auth_payload)
|
||||
user_info = auth_payload['user']
|
||||
self.password = user_info.get('password')
|
||||
|
||||
@ -230,20 +228,18 @@ class UserAuthInfo(BaseUserInfo):
|
||||
class TOTPUserInfo(BaseUserInfo):
|
||||
|
||||
def __init__(self):
|
||||
super(TOTPUserInfo, self).__init__()
|
||||
super().__init__()
|
||||
self.passcode = None
|
||||
|
||||
def _validate_and_normalize_auth_data(self, auth_payload):
|
||||
super(TOTPUserInfo, self)._validate_and_normalize_auth_data(
|
||||
auth_payload
|
||||
)
|
||||
super()._validate_and_normalize_auth_data(auth_payload)
|
||||
user_info = auth_payload['user']
|
||||
self.passcode = user_info.get('passcode')
|
||||
|
||||
|
||||
class AppCredInfo(BaseUserInfo):
|
||||
def __init__(self):
|
||||
super(AppCredInfo, self).__init__()
|
||||
super().__init__()
|
||||
self.id = None
|
||||
self.secret = None
|
||||
|
||||
@ -257,13 +253,9 @@ class AppCredInfo(BaseUserInfo):
|
||||
if not auth_payload.get('user'):
|
||||
auth_payload['user'] = {}
|
||||
auth_payload['user']['id'] = self.user_id
|
||||
super(AppCredInfo, self)._validate_and_normalize_auth_data(
|
||||
auth_payload
|
||||
)
|
||||
super()._validate_and_normalize_auth_data(auth_payload)
|
||||
elif auth_payload.get('name'):
|
||||
super(AppCredInfo, self)._validate_and_normalize_auth_data(
|
||||
auth_payload
|
||||
)
|
||||
super()._validate_and_normalize_auth_data(auth_payload)
|
||||
hints = driver_hints.Hints()
|
||||
hints.add_filter('name', auth_payload['name'])
|
||||
app_cred = app_cred_api.list_application_credentials(
|
||||
|
@ -94,4 +94,4 @@ class KerberosDomain(Domain):
|
||||
def _authenticate(self):
|
||||
if flask.request.environ.get('AUTH_TYPE') != 'Negotiate':
|
||||
raise exception.Unauthorized(_("auth_type is not Negotiate"))
|
||||
return super(KerberosDomain, self)._authenticate()
|
||||
return super()._authenticate()
|
||||
|
@ -22,9 +22,7 @@ from keystone import exception
|
||||
CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class CatalogDriverBase(
|
||||
provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta
|
||||
):
|
||||
class CatalogDriverBase(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
|
||||
"""Interface description for the Catalog driver."""
|
||||
|
||||
def _get_list_limit(self):
|
||||
|
@ -100,7 +100,7 @@ class Endpoint(sql.ModelBase, sql.ModelDictMixinWithExtras):
|
||||
new_dict = endpoint_dict.copy()
|
||||
if new_dict.get('enabled') is None:
|
||||
new_dict['enabled'] = True
|
||||
return super(Endpoint, cls).from_dict(new_dict)
|
||||
return super().from_dict(new_dict)
|
||||
|
||||
|
||||
class Catalog(base.CatalogDriverBase):
|
||||
|
@ -82,7 +82,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
"""
|
||||
|
||||
def __init__(self, templates=None):
|
||||
super(Catalog, self).__init__()
|
||||
super().__init__()
|
||||
LOG.warning(
|
||||
'The templated catalog driver has been deprecated and '
|
||||
'will be removed in a future release.'
|
||||
@ -99,7 +99,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
try:
|
||||
with open(template_file) as f:
|
||||
self.templates = parse_templates(f)
|
||||
except IOError:
|
||||
except OSError:
|
||||
LOG.critical('Unable to open template file %s', template_file)
|
||||
raise
|
||||
|
||||
@ -167,7 +167,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
for key in service_ref:
|
||||
if key.endswith('URL'):
|
||||
interface = key[:-3]
|
||||
endpoint_id = '%s-%s-%s' % (
|
||||
endpoint_id = '{}-{}-{}'.format(
|
||||
region_id,
|
||||
service_type,
|
||||
interface,
|
||||
|
@ -58,7 +58,7 @@ class Manager(manager.Manager):
|
||||
_REGION = 'region'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.catalog.driver)
|
||||
super().__init__(CONF.catalog.driver)
|
||||
notifications.register_event_callback(
|
||||
notifications.ACTIONS.deleted,
|
||||
'project',
|
||||
|
@ -25,7 +25,7 @@ LOG = log.getLogger(__name__)
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Bootstrapper(object):
|
||||
class Bootstrapper:
|
||||
|
||||
def __init__(self):
|
||||
backends.load_backends()
|
||||
|
@ -50,7 +50,7 @@ CONF = keystone.conf.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseApp(object):
|
||||
class BaseApp:
|
||||
|
||||
name = None
|
||||
|
||||
@ -71,7 +71,7 @@ class BootStrap(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(BootStrap, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'--bootstrap-username',
|
||||
default='admin',
|
||||
@ -272,7 +272,7 @@ class ProjectSetup(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(ProjectSetup, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'--project-name',
|
||||
default=None,
|
||||
@ -308,7 +308,7 @@ class UserSetup(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(UserSetup, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'--username',
|
||||
default=None,
|
||||
@ -349,7 +349,7 @@ class Doctor(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(Doctor, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
return parser
|
||||
|
||||
@staticmethod
|
||||
@ -365,7 +365,7 @@ class DbSync(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(DbSync, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'version',
|
||||
default=None,
|
||||
@ -507,9 +507,7 @@ class BasePermissionsSetup(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(BasePermissionsSetup, cls).add_argument_parser(
|
||||
subparsers
|
||||
)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
running_as_root = os.geteuid() == 0
|
||||
parser.add_argument('--keystone-user', required=running_as_root)
|
||||
parser.add_argument('--keystone-group', required=running_as_root)
|
||||
@ -651,7 +649,7 @@ class CreateJWSKeyPair(BasePermissionsSetup):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(CreateJWSKeyPair, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
|
||||
parser.add_argument(
|
||||
'--force',
|
||||
@ -934,7 +932,7 @@ class TrustFlush(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(TrustFlush, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
|
||||
parser.add_argument(
|
||||
'--project-id',
|
||||
@ -1013,7 +1011,7 @@ class MappingPurge(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(MappingPurge, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'--all',
|
||||
default=False,
|
||||
@ -1139,10 +1137,10 @@ def _domain_config_finder(conf_dir):
|
||||
)
|
||||
|
||||
|
||||
class DomainConfigUploadFiles(object):
|
||||
class DomainConfigUploadFiles:
|
||||
|
||||
def __init__(self, domain_config_finder=_domain_config_finder):
|
||||
super(DomainConfigUploadFiles, self).__init__()
|
||||
super().__init__()
|
||||
self.load_backends()
|
||||
self._domain_config_finder = domain_config_finder
|
||||
|
||||
@ -1324,7 +1322,7 @@ class DomainConfigUpload(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(DomainConfigUpload, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
parser.add_argument(
|
||||
'--all',
|
||||
default=False,
|
||||
@ -1369,7 +1367,7 @@ class MappingEngineTester(BaseApp):
|
||||
name = 'mapping_engine'
|
||||
|
||||
def __init__(self):
|
||||
super(MappingEngineTester, self).__init__()
|
||||
super().__init__()
|
||||
self.mapping_id = uuid.uuid4().hex
|
||||
self.rules_pathname = None
|
||||
self.rules = None
|
||||
@ -1392,7 +1390,7 @@ class MappingEngineTester(BaseApp):
|
||||
try:
|
||||
with open(path) as file:
|
||||
self.assertion = file.read().strip()
|
||||
except IOError as e:
|
||||
except OSError as e:
|
||||
raise SystemExit(
|
||||
_("Error while opening file " "%(path)s: %(err)s")
|
||||
% {'path': path, 'err': e}
|
||||
@ -1496,9 +1494,7 @@ class MappingEngineTester(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(MappingEngineTester, cls).add_argument_parser(
|
||||
subparsers
|
||||
)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
|
||||
parser.formatter_class = argparse.RawTextHelpFormatter
|
||||
parser.add_argument(
|
||||
@ -1579,7 +1575,7 @@ class MappingPopulate(BaseApp):
|
||||
|
||||
@classmethod
|
||||
def add_argument_parser(cls, subparsers):
|
||||
parser = super(MappingPopulate, cls).add_argument_parser(subparsers)
|
||||
parser = super().add_argument_parser(subparsers)
|
||||
|
||||
parser.add_argument(
|
||||
'--domain-name',
|
||||
|
@ -27,7 +27,7 @@ LOG = log.getLogger(__name__)
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Identity(object):
|
||||
class Identity:
|
||||
|
||||
def __init__(self):
|
||||
backends.load_backends()
|
||||
|
4  keystone/common/cache/core.py (vendored)
@ -27,7 +27,7 @@ import keystone.conf
|
||||
CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class RegionInvalidationManager(object):
|
||||
class RegionInvalidationManager:
|
||||
|
||||
REGION_KEY_PREFIX = '<<<region>>>:'
|
||||
|
||||
@ -84,7 +84,7 @@ def key_mangler_factory(invalidation_manager, orig_key_mangler):
|
||||
# If it were there would be no way to get to it, making the cache
|
||||
# effectively useless.
|
||||
if not invalidation_manager.is_region_key(key):
|
||||
key = '%s:%s' % (key, invalidation_manager.region_id)
|
||||
key = f'{key}:{invalidation_manager.region_id}'
|
||||
if orig_key_mangler:
|
||||
key = orig_key_mangler(key)
|
||||
return key
|
||||
|
@ -38,7 +38,7 @@ class RequestContext(oslo_context.RequestContext):
|
||||
self.oauth_access_token_id = kwargs.pop('oauth_access_token_id', None)
|
||||
|
||||
self.authenticated = kwargs.pop('authenticated', False)
|
||||
super(RequestContext, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def to_policy_values(self):
|
||||
"""Add keystone-specific policy values to policy representation.
|
||||
@ -62,7 +62,7 @@ class RequestContext(oslo_context.RequestContext):
|
||||
# needs reworking of how we handle the context in oslo.policy. Until
|
||||
# this is reworked, it is not possible to merge the token render
|
||||
# function into keystone.api
|
||||
values = super(RequestContext, self).to_policy_values()
|
||||
values = super().to_policy_values()
|
||||
values['token'] = self.token_reference['token']
|
||||
values['domain_id'] = self.domain_id if self.domain_id else None
|
||||
return values
|
||||
|
@ -63,7 +63,7 @@ def truncated(f):
|
||||
return wrapper
|
||||
|
||||
|
||||
class Hints(object):
|
||||
class Hints:
|
||||
"""Encapsulate driver hints for listing entities.
|
||||
|
||||
Hints are modifiers that affect the return of entities from a
|
||||
|
@ -34,7 +34,7 @@ CONF = keystone.conf.CONF
|
||||
NULL_KEY = base64.urlsafe_b64encode(b'\x00' * 32)
|
||||
|
||||
|
||||
class FernetUtils(object):
|
||||
class FernetUtils:
|
||||
|
||||
def __init__(self, key_repository, max_active_keys, config_group):
|
||||
self.key_repository = key_repository
|
||||
@ -128,7 +128,7 @@ class FernetUtils(object):
|
||||
f.write(key.decode('utf-8'))
|
||||
f.flush()
|
||||
create_success = True
|
||||
except IOError:
|
||||
except OSError:
|
||||
LOG.error('Failed to create new temporary key: %s', key_file)
|
||||
raise
|
||||
finally:
|
||||
@ -163,7 +163,7 @@ class FernetUtils(object):
|
||||
for filename in os.listdir(key_repo):
|
||||
path = os.path.join(key_repo, str(filename))
|
||||
if os.path.isfile(path):
|
||||
with open(path, 'r') as key_file:
|
||||
with open(path) as key_file:
|
||||
try:
|
||||
key_id = int(filename)
|
||||
except ValueError: # nosec : name is not a number
|
||||
|
@ -50,7 +50,7 @@ def build_v3_extension_parameter_relation(
|
||||
)
|
||||
|
||||
|
||||
class Parameters(object):
|
||||
class Parameters:
|
||||
"""Relationships for Common parameters."""
|
||||
|
||||
DOMAIN_ID = build_v3_parameter_relation('domain_id')
|
||||
@ -71,7 +71,7 @@ class Parameters(object):
|
||||
ACCESS_RULE_ID = build_v3_parameter_relation('access_rule_id')
|
||||
|
||||
|
||||
class Status(object):
|
||||
class Status:
|
||||
"""Status values supported."""
|
||||
|
||||
DEPRECATED = 'deprecated'
|
||||
@ -95,7 +95,7 @@ class Status(object):
|
||||
)
|
||||
|
||||
|
||||
class JsonHomeResources(object):
|
||||
class JsonHomeResources:
|
||||
"""JSON Home resource data."""
|
||||
|
||||
__resources = {}
|
||||
|
@ -87,11 +87,11 @@ class _TraceMeta(type):
|
||||
@staticmethod
|
||||
def wrapper(__f, __classname):
|
||||
__argspec = inspect.getfullargspec(__f)
|
||||
__fn_info = '%(module)s.%(classname)s.%(funcname)s' % {
|
||||
'module': inspect.getmodule(__f).__name__,
|
||||
'classname': __classname,
|
||||
'funcname': __f.__name__,
|
||||
}
|
||||
__fn_info = '{module}.{classname}.{funcname}'.format(
|
||||
module=inspect.getmodule(__f).__name__,
|
||||
classname=__classname,
|
||||
funcname=__f.__name__,
|
||||
)
|
||||
# NOTE(morganfainberg): Omit "cls" and "self" when printing trace logs
|
||||
# the index can be calculated at wrap time rather than at runtime.
|
||||
if __argspec.args and __argspec.args[0] in ('self', 'cls'):
|
||||
@ -120,10 +120,7 @@ class _TraceMeta(type):
|
||||
[
|
||||
', '.join([repr(a) for a in args[__arg_idx:]]),
|
||||
', '.join(
|
||||
[
|
||||
'%(k)s=%(v)r' % {'k': k, 'v': v}
|
||||
for k, v in kwargs.items()
|
||||
]
|
||||
[f'{k}={v!r}' for k, v in kwargs.items()]
|
||||
),
|
||||
]
|
||||
),
|
||||
@ -161,7 +158,7 @@ class _TraceMeta(type):
|
||||
return type.__new__(meta, classname, bases, final_cls_dict)
|
||||
|
||||
|
||||
class Manager(object, metaclass=_TraceMeta):
|
||||
class Manager(metaclass=_TraceMeta):
|
||||
"""Base class for intermediary request layer.
|
||||
|
||||
The Manager layer exists to support additional logic that applies to all
|
||||
|
@ -11,7 +11,7 @@
|
||||
# under the License.
|
||||
|
||||
|
||||
class ProviderAPIRegistry(object):
|
||||
class ProviderAPIRegistry:
|
||||
__shared_object_state = {}
|
||||
__registry = {}
|
||||
__iter__ = __registry.__iter__
|
||||
@ -28,9 +28,7 @@ class ProviderAPIRegistry(object):
|
||||
#
|
||||
# Use "super" to bypass the __setattr__ preventing changes to the
|
||||
# object itself.
|
||||
super(ProviderAPIRegistry, self).__setattr__(
|
||||
'__dict__', self.__shared_object_state
|
||||
)
|
||||
super().__setattr__('__dict__', self.__shared_object_state)
|
||||
|
||||
def __getattr__(self, item):
|
||||
"""Do attr lookup."""
|
||||
@ -70,11 +68,11 @@ class ProviderAPIRegistry(object):
|
||||
"""ONLY USED FOR TESTING."""
|
||||
self.__registry.clear()
|
||||
# Use super to allow setting around class implementation of __setattr__
|
||||
super(ProviderAPIRegistry, self).__setattr__('locked', False)
|
||||
super().__setattr__('locked', False)
|
||||
|
||||
def lock_provider_registry(self):
|
||||
# Use super to allow setting around class implementation of __setattr__
|
||||
super(ProviderAPIRegistry, self).__setattr__('locked', True)
|
||||
super().__setattr__('locked', True)
|
||||
|
||||
def deferred_provider_lookup(self, api, method):
|
||||
"""Create descriptor that performs lookup of api and method on demand.
|
||||
@ -90,7 +88,7 @@ class ProviderAPIRegistry(object):
|
||||
:type method: str
|
||||
"""
|
||||
|
||||
class DeferredProviderLookup(object):
|
||||
class DeferredProviderLookup:
|
||||
def __init__(self, api, method):
|
||||
self.__api = api
|
||||
self.__method = method
|
||||
@ -106,7 +104,7 @@ class DuplicateProviderError(Exception):
|
||||
"""Attempting to register a duplicate API provider."""
|
||||
|
||||
|
||||
class ProviderAPIMixin(object):
|
||||
class ProviderAPIMixin:
|
||||
"""Allow referencing provider apis on self via __getattr__.
|
||||
|
||||
Be sure this class is first in the class definition for inheritance.
|
||||
|
@ -50,7 +50,7 @@ DEFAULT_POLICY_FILE = 'policy.yaml'
|
||||
opts.set_defaults(CONF, DEFAULT_POLICY_FILE)
|
||||
|
||||
|
||||
class RBACEnforcer(object):
|
||||
class RBACEnforcer:
|
||||
"""Enforce RBAC on API calls."""
|
||||
|
||||
__shared_state__ = {}
|
||||
@ -184,7 +184,7 @@ class RBACEnforcer(object):
|
||||
if LOG.logger.getEffectiveLevel() <= log.DEBUG:
|
||||
LOG.debug(
|
||||
'RBAC: Adding query filter params (%s)',
|
||||
', '.join(['%s=%s' % (k, v) for k, v in target.items()]),
|
||||
', '.join([f'{k}={v}' for k, v in target.items()]),
|
||||
)
|
||||
return target
|
||||
|
||||
@ -496,7 +496,7 @@ class RBACEnforcer(object):
|
||||
# LOG the Args
|
||||
args_str = ', '.join(
|
||||
[
|
||||
'%s=%s' % (k, v)
|
||||
f'{k}={v}'
|
||||
for k, v in (flask.request.view_args or {}).items()
|
||||
]
|
||||
)
|
||||
|
@ -105,14 +105,14 @@ def resource_options_ref_to_mapper(ref, option_class):
|
||||
ref._resource_option_mapper[r_opt_id] = opt_obj
|
||||
|
||||
|
||||
class ResourceOptionRegistry(object):
|
||||
class ResourceOptionRegistry:
|
||||
def __init__(self, registry_name):
|
||||
self._registered_options = {}
|
||||
self._registry_type = registry_name
|
||||
|
||||
@property
|
||||
def option_names(self):
|
||||
return set([opt.option_name for opt in self.options])
|
||||
return {opt.option_name for opt in self.options}
|
||||
|
||||
@property
|
||||
def options_by_name(self):
|
||||
@ -182,7 +182,7 @@ class ResourceOptionRegistry(object):
|
||||
self._registered_options[option.option_id] = option
|
||||
|
||||
|
||||
class ResourceOption(object):
|
||||
class ResourceOption:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
|
@ -30,7 +30,7 @@ CONF = keystone.conf.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class TokenlessAuthHelper(provider_api.ProviderAPIMixin, object):
|
||||
class TokenlessAuthHelper(provider_api.ProviderAPIMixin):
|
||||
def __init__(self, env):
|
||||
"""A init class for TokenlessAuthHelper.
|
||||
|
||||
|
@ -112,7 +112,7 @@ class SmarterEncoder(jsonutils.json.JSONEncoder):
|
||||
def default(self, obj):
|
||||
if not isinstance(obj, dict) and hasattr(obj, 'items'):
|
||||
return dict(obj.items())
|
||||
return super(SmarterEncoder, self).default(obj)
|
||||
return super().default(obj)
|
||||
|
||||
|
||||
def hash_access_key(access):
|
||||
@ -304,7 +304,7 @@ def get_unix_group(group=None):
|
||||
return group_info.gr_gid, group_info.gr_name
|
||||
|
||||
|
||||
class WhiteListedItemFilter(object):
|
||||
class WhiteListedItemFilter:
|
||||
|
||||
def __init__(self, whitelist, data):
|
||||
self._whitelist = set(whitelist or [])
|
||||
|
@ -52,7 +52,7 @@ def validate_password(password):
|
||||
raise exception.PasswordValidationError(detail=detail)
|
||||
|
||||
|
||||
class SchemaValidator(object):
|
||||
class SchemaValidator:
|
||||
"""Resource reference validator class."""
|
||||
|
||||
validator_org = jsonschema.Draft4Validator
|
||||
|
@ -22,7 +22,7 @@ from keystone import exception
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class CredentialDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class CredentialDriverBase(metaclass=abc.ABCMeta):
|
||||
# credential crud
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -44,7 +44,7 @@ class Manager(manager.Manager):
|
||||
_CRED = 'credential'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.credential.driver)
|
||||
super().__init__(CONF.credential.driver)
|
||||
|
||||
def _decrypt_credential(self, credential):
|
||||
"""Return a decrypted credential reference."""
|
||||
|
@ -23,4 +23,4 @@ class Manager(manager.Manager):
|
||||
_provides_api = 'credential_provider_api'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.credential.provider)
|
||||
super().__init__(CONF.credential.provider)
|
||||
|
@ -13,7 +13,7 @@
|
||||
import abc
|
||||
|
||||
|
||||
class Provider(object, metaclass=abc.ABCMeta):
|
||||
class Provider(metaclass=abc.ABCMeta):
|
||||
"""Interface for credential providers that support encryption."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -15,7 +15,7 @@ import abc
|
||||
from keystone import exception
|
||||
|
||||
|
||||
class EndpointPolicyDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class EndpointPolicyDriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface description for an Endpoint Policy driver."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -38,7 +38,7 @@ class Manager(manager.Manager):
|
||||
_provides_api = 'endpoint_policy_api'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.endpoint_policy.driver)
|
||||
super().__init__(CONF.endpoint_policy.driver)
|
||||
|
||||
def _assert_valid_association(self, endpoint_id, service_id, region_id):
|
||||
"""Assert that the association is supported.
|
||||
|
@ -23,7 +23,7 @@ from keystone.i18n import _
|
||||
CONF = keystone.conf.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
KEYSTONE_API_EXCEPTIONS = set([])
|
||||
KEYSTONE_API_EXCEPTIONS = set()
|
||||
|
||||
# Tests use this to make exception message format errors fatal
|
||||
_FATAL_EXCEPTION_FORMAT_ERRORS = False
|
||||
@ -81,7 +81,7 @@ class Error(Exception, metaclass=_KeystoneExceptionMeta):
|
||||
LOG.warning('missing exception kwargs (programmer error)')
|
||||
message = self.message_format
|
||||
|
||||
super(Error, self).__init__(message)
|
||||
super().__init__(message)
|
||||
|
||||
def _build_message(self, message, **kwargs):
|
||||
"""Build and returns an exception message.
|
||||
@ -328,9 +328,7 @@ class InsufficientAuthMethods(Error):
|
||||
|
||||
def __init__(self, message=None, user_id=None, methods=None):
|
||||
methods_str = '[%s]' % ','.join(methods)
|
||||
super(InsufficientAuthMethods, self).__init__(
|
||||
message, user_id=user_id, methods=methods_str
|
||||
)
|
||||
super().__init__(message, user_id=user_id, methods=methods_str)
|
||||
|
||||
self.user_id = user_id
|
||||
self.methods = methods
|
||||
@ -351,7 +349,7 @@ class AuthPluginException(Unauthorized):
|
||||
message_format = _("Authentication plugin error.")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AuthPluginException, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.authentication = {}
|
||||
|
||||
|
||||
@ -367,7 +365,7 @@ class AuthMethodNotSupported(AuthPluginException):
|
||||
message_format = _("Attempted to authenticate with an unsupported method.")
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(AuthMethodNotSupported, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.authentication = {'methods': CONF.auth.methods}
|
||||
|
||||
|
||||
@ -381,7 +379,7 @@ class AdditionalAuthRequired(AuthPluginException):
|
||||
message_format = _("Additional authentications steps required.")
|
||||
|
||||
def __init__(self, auth_response=None, **kwargs):
|
||||
super(AdditionalAuthRequired, self).__init__(message=None, **kwargs)
|
||||
super().__init__(message=None, **kwargs)
|
||||
self.authentication = auth_response
|
||||
|
||||
|
||||
@ -427,7 +425,7 @@ class InvalidLimit(Forbidden):
|
||||
|
||||
class LimitTreeExceedError(Exception):
|
||||
def __init__(self, project_id, max_limit_depth):
|
||||
super(LimitTreeExceedError, self).__init__(
|
||||
super().__init__(
|
||||
_(
|
||||
"Keystone cannot start due to project hierarchical depth in the "
|
||||
"current deployment (project_ids: %(project_id)s) exceeds the "
|
||||
@ -646,7 +644,7 @@ class UnexpectedError(SecurityError):
|
||||
# exception.
|
||||
kwargs.setdefault('exception', '')
|
||||
|
||||
return super(UnexpectedError, self)._build_message(
|
||||
return super()._build_message(
|
||||
message or self.debug_message_format, **kwargs
|
||||
)
|
||||
|
||||
@ -734,7 +732,7 @@ class MultipleSQLDriversInConfig(UnexpectedError):
|
||||
|
||||
class MigrationNotProvided(Exception):
|
||||
def __init__(self, mod_name, path):
|
||||
super(MigrationNotProvided, self).__init__(
|
||||
super().__init__(
|
||||
_(
|
||||
"%(mod_name)s doesn't provide database migrations. The migration"
|
||||
" repository path at %(path)s doesn't exist or isn't a directory."
|
||||
@ -806,7 +804,7 @@ class LDAPSizeLimitExceeded(UnexpectedError):
|
||||
class CacheDeserializationError(Exception):
|
||||
|
||||
def __init__(self, obj, data):
|
||||
super(CacheDeserializationError, self).__init__(
|
||||
super().__init__(
|
||||
_('Failed to deserialize %(obj)s. Data is %(data)s')
|
||||
% {'obj': obj, 'data': data}
|
||||
)
|
||||
@ -875,4 +873,4 @@ class RedirectRequired(Exception):
|
||||
def __init__(self, redirect_url, **kwargs):
|
||||
self.redirect_url = redirect_url
|
||||
|
||||
super(RedirectRequired, self).__init__(**kwargs)
|
||||
super().__init__(**kwargs)
|
||||
|
@ -17,7 +17,7 @@ import abc
|
||||
from keystone import exception
|
||||
|
||||
|
||||
class FederationDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class FederationDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_idp(self, idp_id, idp):
|
||||
|
@ -47,7 +47,7 @@ class Manager(manager.Manager):
|
||||
_provides_api = 'federation_api'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.federation.driver)
|
||||
super().__init__(CONF.federation.driver)
|
||||
notifications.register_event_callback(
|
||||
notifications.ACTIONS.internal,
|
||||
notifications.DOMAIN_DELETED,
|
||||
|
@ -42,7 +42,7 @@ LOG = log.getLogger(__name__)
|
||||
CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class SAMLGenerator(object):
|
||||
class SAMLGenerator:
|
||||
"""A class to generate SAML assertions."""
|
||||
|
||||
def __init__(self):
|
||||
@ -502,10 +502,10 @@ def _sign_assertion(assertion):
|
||||
)
|
||||
|
||||
# xmlsec1 --sign --privkey-pem privkey,cert --id-attr:ID <tag> <file>
|
||||
certificates = '%(idp_private_key)s,%(idp_public_key)s' % {
|
||||
'idp_public_key': CONF.saml.certfile,
|
||||
'idp_private_key': CONF.saml.keyfile,
|
||||
}
|
||||
certificates = '{idp_private_key},{idp_public_key}'.format(
|
||||
idp_public_key=CONF.saml.certfile,
|
||||
idp_private_key=CONF.saml.keyfile,
|
||||
)
|
||||
|
||||
# Verify that the binary used to create the assertion actually exists on
|
||||
# the system. If it doesn't, log a warning for operators to go and install
|
||||
@ -570,7 +570,7 @@ def _sign_assertion(assertion):
|
||||
return saml2.create_class_from_xml_string(saml.Assertion, stdout)
|
||||
|
||||
|
||||
class MetadataGenerator(object):
|
||||
class MetadataGenerator:
|
||||
"""A class for generating SAML IdP Metadata."""
|
||||
|
||||
def generate_metadata(self):
|
||||
@ -598,7 +598,7 @@ class MetadataGenerator(object):
|
||||
def get_cert():
|
||||
try:
|
||||
return sigver.read_cert_from_file(CONF.saml.certfile, 'pem')
|
||||
except (IOError, sigver.CertificateError) as e:
|
||||
except (OSError, sigver.CertificateError) as e:
|
||||
msg = (
|
||||
'Cannot open certificate %(cert_file)s.'
|
||||
'Reason: %(reason)s'
|
||||
@ -608,7 +608,7 @@ class MetadataGenerator(object):
|
||||
'Reason: %(reason)s'
|
||||
) % {'cert_file': CONF.saml.certfile, 'reason': e}
|
||||
LOG.error(msg)
|
||||
raise IOError(tr_msg)
|
||||
raise OSError(tr_msg)
|
||||
|
||||
def key_descriptor():
|
||||
cert = get_cert()
|
||||
@ -717,7 +717,7 @@ class MetadataGenerator(object):
|
||||
return True
|
||||
|
||||
|
||||
class ECPGenerator(object):
|
||||
class ECPGenerator:
|
||||
"""A class for generating an ECP assertion."""
|
||||
|
||||
@staticmethod
|
||||
|
@ -34,7 +34,7 @@ LOG = log.getLogger(__name__)
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class UserType(object):
|
||||
class UserType:
|
||||
"""User mapping type."""
|
||||
|
||||
EPHEMERAL = 'ephemeral'
|
||||
@ -230,7 +230,7 @@ def get_default_attribute_mapping_schema_version():
|
||||
return CONF.federation.attribute_mapping_default_schema_version
|
||||
|
||||
|
||||
class DirectMaps(object):
|
||||
class DirectMaps:
|
||||
"""An abstraction around the remote matches.
|
||||
|
||||
Each match is treated internally as a list.
|
||||
@ -460,10 +460,10 @@ def get_assertion_params_from_env():
|
||||
yield (k, v)
|
||||
|
||||
|
||||
class RuleProcessor(object):
|
||||
class RuleProcessor:
|
||||
"""A class to process assertions and mapping rules."""
|
||||
|
||||
class _EvalType(object):
|
||||
class _EvalType:
|
||||
"""Mapping rule evaluation types."""
|
||||
|
||||
ANY_ONE_OF = 'any_one_of'
|
||||
@ -638,8 +638,7 @@ class RuleProcessor(object):
|
||||
|
||||
def extract_groups(self, groups_by_domain):
|
||||
for groups in list(groups_by_domain.values()):
|
||||
for group in list({g['name']: g for g in groups}.values()):
|
||||
yield group
|
||||
yield from list({g['name']: g for g in groups}.values())
|
||||
|
||||
def _transform(self, identity_values):
|
||||
"""Transform local mappings, to an easier to understand format.
|
||||
@ -974,7 +973,7 @@ class RuleProcessor(object):
|
||||
def assert_enabled_identity_provider(federation_api, idp_id):
|
||||
identity_provider = federation_api.get_idp(idp_id)
|
||||
if identity_provider.get('enabled') is not True:
|
||||
msg = 'Identity Provider %(idp)s is disabled' % {'idp': idp_id}
|
||||
msg = f'Identity Provider {idp_id} is disabled'
|
||||
tr_msg = _('Identity Provider %(idp)s is disabled') % {'idp': idp_id}
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(tr_msg)
|
||||
@ -983,7 +982,7 @@ def assert_enabled_identity_provider(federation_api, idp_id):
|
||||
def assert_enabled_service_provider_object(service_provider):
|
||||
if service_provider.get('enabled') is not True:
|
||||
sp_id = service_provider['id']
|
||||
msg = 'Service Provider %(sp)s is disabled' % {'sp': sp_id}
|
||||
msg = f'Service Provider {sp_id} is disabled'
|
||||
tr_msg = _('Service Provider %(sp)s is disabled') % {'sp': sp_id}
|
||||
LOG.debug(msg)
|
||||
raise exception.Forbidden(tr_msg)
|
||||
@ -1001,9 +1000,7 @@ class RuleProcessorToHonorDomainOption(RuleProcessor):
|
||||
"""
|
||||
|
||||
def __init__(self, mapping_id, rules):
|
||||
super(RuleProcessorToHonorDomainOption, self).__init__(
|
||||
mapping_id, rules
|
||||
)
|
||||
super().__init__(mapping_id, rules)
|
||||
|
||||
def extract_projects(self, identity_value):
|
||||
projects = identity_value.get("projects", [])
|
||||
@ -1019,9 +1016,7 @@ class RuleProcessorToHonorDomainOption(RuleProcessor):
|
||||
return projects
|
||||
|
||||
def normalize_user(self, user, default_mapping_domain):
|
||||
super(RuleProcessorToHonorDomainOption, self).normalize_user(
|
||||
user, default_mapping_domain
|
||||
)
|
||||
super().normalize_user(user, default_mapping_domain)
|
||||
if not user.get("domain"):
|
||||
LOG.debug(
|
||||
"Configuring the domain [%s] for user [%s].",
|
||||
|
@ -46,7 +46,7 @@ def filter_user(user_ref):
|
||||
return user_ref
|
||||
|
||||
|
||||
class IdentityDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class IdentityDriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface description for an Identity driver.
|
||||
|
||||
The schema for users and groups is different depending on whether the
|
||||
|
@ -122,7 +122,7 @@ def py2ldap(val):
|
||||
:returns: unicode string representation of value.
|
||||
"""
|
||||
if isinstance(val, bool):
|
||||
return u'TRUE' if val else u'FALSE'
|
||||
return 'TRUE' if val else 'FALSE'
|
||||
else:
|
||||
return str(val)
|
||||
|
||||
@ -202,8 +202,7 @@ def safe_iter(attrs):
|
||||
if attrs is None:
|
||||
return
|
||||
elif isinstance(attrs, list):
|
||||
for e in attrs:
|
||||
yield e
|
||||
yield from attrs
|
||||
else:
|
||||
yield attrs
|
||||
|
||||
@ -353,7 +352,7 @@ def dn_startswith(descendant_dn, dn):
|
||||
return is_dn_equal(descendant_dn[-len(dn) :], dn)
|
||||
|
||||
|
||||
class LDAPHandler(object, metaclass=abc.ABCMeta):
|
||||
class LDAPHandler(metaclass=abc.ABCMeta):
|
||||
"""Abstract class which defines methods for a LDAP API provider.
|
||||
|
||||
Native Keystone values cannot be passed directly into and from the
|
||||
@ -713,7 +712,7 @@ def _common_ldap_initialization(
|
||||
# works but these values are ignored when setting them on the
|
||||
# connection
|
||||
if not os.path.isfile(tls_cacertfile):
|
||||
raise IOError(
|
||||
raise OSError(
|
||||
_("tls_cacertfile %s not found " "or is not a file")
|
||||
% tls_cacertfile
|
||||
)
|
||||
@ -726,7 +725,7 @@ def _common_ldap_initialization(
|
||||
# works but these values are ignored when setting them on the
|
||||
# connection
|
||||
if not os.path.isdir(tls_cacertdir):
|
||||
raise IOError(
|
||||
raise OSError(
|
||||
_("tls_cacertdir %s not found " "or is not a directory")
|
||||
% tls_cacertdir
|
||||
)
|
||||
@ -739,7 +738,7 @@ def _common_ldap_initialization(
|
||||
)
|
||||
|
||||
|
||||
class AsynchronousMessage(object):
|
||||
class AsynchronousMessage:
|
||||
"""A container for handling asynchronous LDAP responses.
|
||||
|
||||
Some LDAP APIs, like `search_ext`, are asynchronous and return a message ID
|
||||
@ -823,7 +822,7 @@ class PooledLDAPHandler(LDAPHandler):
|
||||
connection_pools = {} # static connector pool dict
|
||||
|
||||
def __init__(self, conn=None, use_auth_pool=False):
|
||||
super(PooledLDAPHandler, self).__init__(conn=conn)
|
||||
super().__init__(conn=conn)
|
||||
self.who = ''
|
||||
self.cred = ''
|
||||
self.conn_options = {} # connection specific options
|
||||
@ -1057,7 +1056,7 @@ class KeystoneLDAPHandler(LDAPHandler):
|
||||
"""
|
||||
|
||||
def __init__(self, conn=None):
|
||||
super(KeystoneLDAPHandler, self).__init__(conn=conn)
|
||||
super().__init__(conn=conn)
|
||||
self.page_size = 0
|
||||
|
||||
def __enter__(self):
|
||||
@ -1371,7 +1370,7 @@ def filter_entity(entity_ref):
|
||||
return entity_ref
|
||||
|
||||
|
||||
class BaseLdap(object):
|
||||
class BaseLdap:
|
||||
DEFAULT_OU = None
|
||||
DEFAULT_STRUCTURAL_CLASSES = None
|
||||
DEFAULT_ID_ATTR = 'cn'
|
||||
@ -1422,9 +1421,10 @@ class BaseLdap(object):
|
||||
self.auth_pool_conn_lifetime = conf.ldap.auth_pool_connection_lifetime
|
||||
|
||||
if self.options_name is not None:
|
||||
self.tree_dn = getattr(
|
||||
conf.ldap, '%s_tree_dn' % self.options_name
|
||||
) or '%s,%s' % (self.DEFAULT_OU, conf.ldap.suffix)
|
||||
self.tree_dn = (
|
||||
getattr(conf.ldap, '%s_tree_dn' % self.options_name)
|
||||
or f'{self.DEFAULT_OU},{conf.ldap.suffix}'
|
||||
)
|
||||
|
||||
idatt = '%s_id_attribute' % self.options_name
|
||||
self.id_attr = getattr(conf.ldap, idatt) or self.DEFAULT_ID_ATTR
|
||||
@ -1435,7 +1435,7 @@ class BaseLdap(object):
|
||||
)
|
||||
|
||||
for k, v in self.attribute_options_names.items():
|
||||
v = '%s_%s_attribute' % (self.options_name, v)
|
||||
v = f'{self.options_name}_{v}_attribute'
|
||||
self.attribute_mapping[k] = getattr(conf.ldap, v)
|
||||
|
||||
attr_mapping_opt = (
|
||||
@ -1546,7 +1546,7 @@ class BaseLdap(object):
|
||||
raise exception.LDAPServerConnectionError(url=self.LDAP_URL)
|
||||
|
||||
def _id_to_dn_string(self, object_id):
|
||||
return u'%s=%s,%s' % (
|
||||
return '{}={},{}'.format(
|
||||
self.id_attr,
|
||||
ldap.dn.escape_dn_chars(str(object_id)),
|
||||
self.tree_dn,
|
||||
@ -1559,7 +1559,7 @@ class BaseLdap(object):
|
||||
search_result = conn.search_s(
|
||||
self.tree_dn,
|
||||
self.LDAP_SCOPE,
|
||||
u'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))'
|
||||
'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))'
|
||||
% {
|
||||
'id_attr': self.id_attr,
|
||||
'id': ldap.filter.escape_filter_chars(str(object_id)),
|
||||
@ -1743,7 +1743,7 @@ class BaseLdap(object):
|
||||
|
||||
# To ensure that ldap attribute value is not empty in ldap config.
|
||||
if not attr:
|
||||
attr_name = '%s_%s_attribute' % (
|
||||
attr_name = '{}_{}_attribute'.format(
|
||||
self.options_name,
|
||||
self.attribute_options_names[ldap_attr_name],
|
||||
)
|
||||
@ -1783,9 +1783,9 @@ class BaseLdap(object):
|
||||
|
||||
def _ldap_get(self, object_id, ldap_filter=None):
|
||||
query = (
|
||||
u'(&(%(id_attr)s=%(id)s)'
|
||||
u'%(filter)s'
|
||||
u'(objectClass=%(object_class)s))'
|
||||
'(&(%(id_attr)s=%(id)s)'
|
||||
'%(filter)s'
|
||||
'(objectClass=%(object_class)s))'
|
||||
% {
|
||||
'id_attr': self.id_attr,
|
||||
'id': ldap.filter.escape_filter_chars(str(object_id)),
|
||||
@ -1797,11 +1797,9 @@ class BaseLdap(object):
|
||||
try:
|
||||
attrs = list(
|
||||
set(
|
||||
(
|
||||
[self.id_attr]
|
||||
+ list(self.attribute_mapping.values())
|
||||
+ list(self.extra_attr_mapping.keys())
|
||||
)
|
||||
[self.id_attr]
|
||||
+ list(self.attribute_mapping.values())
|
||||
+ list(self.extra_attr_mapping.keys())
|
||||
)
|
||||
)
|
||||
res = conn.search_s(
|
||||
@ -1838,7 +1836,7 @@ class BaseLdap(object):
|
||||
|
||||
@driver_hints.truncated
|
||||
def _ldap_get_all(self, hints, ldap_filter=None):
|
||||
query = u'(&%s(objectClass=%s)(%s=*))' % (
|
||||
query = '(&{}(objectClass={})({}=*))'.format(
|
||||
ldap_filter or self.ldap_filter or '',
|
||||
self.object_class,
|
||||
self.id_attr,
|
||||
@ -1846,11 +1844,9 @@ class BaseLdap(object):
|
||||
sizelimit = 0
|
||||
attrs = list(
|
||||
set(
|
||||
(
|
||||
[self.id_attr]
|
||||
+ list(self.attribute_mapping.values())
|
||||
+ list(self.extra_attr_mapping.keys())
|
||||
)
|
||||
[self.id_attr]
|
||||
+ list(self.attribute_mapping.values())
|
||||
+ list(self.extra_attr_mapping.keys())
|
||||
)
|
||||
)
|
||||
if hints.limit:
|
||||
@ -1878,14 +1874,14 @@ class BaseLdap(object):
|
||||
def _ldap_get_list(
|
||||
self, search_base, scope, query_params=None, attrlist=None
|
||||
):
|
||||
query = u'(objectClass=%s)' % self.object_class
|
||||
query = '(objectClass=%s)' % self.object_class
|
||||
if query_params:
|
||||
|
||||
def calc_filter(attrname, value):
|
||||
val_esc = ldap.filter.escape_filter_chars(value)
|
||||
return '(%s=%s)' % (attrname, val_esc)
|
||||
return f'({attrname}={val_esc})'
|
||||
|
||||
query = u'(&%s%s)' % (
|
||||
query = '(&{}{})'.format(
|
||||
query,
|
||||
''.join([calc_filter(k, v) for k, v in query_params.items()]),
|
||||
)
|
||||
@ -1900,7 +1896,7 @@ class BaseLdap(object):
|
||||
return self._ldap_res_to_model(res)
|
||||
|
||||
def get_by_name(self, name, ldap_filter=None):
|
||||
query = u'(%s=%s)' % (
|
||||
query = '({}={})'.format(
|
||||
self.attribute_mapping['name'],
|
||||
ldap.filter.escape_filter_chars(str(name)),
|
||||
)
|
||||
@ -2053,25 +2049,25 @@ class BaseLdap(object):
|
||||
# booleans (this is related to bug #1411478).
|
||||
|
||||
if filter_['comparator'] == 'equals':
|
||||
query_term = u'(%(attr)s=%(val)s)' % {
|
||||
'attr': ldap_attr,
|
||||
'val': val_esc,
|
||||
}
|
||||
query_term = '({attr}={val})'.format(
|
||||
attr=ldap_attr,
|
||||
val=val_esc,
|
||||
)
|
||||
elif filter_['comparator'] == 'contains':
|
||||
query_term = u'(%(attr)s=*%(val)s*)' % {
|
||||
'attr': ldap_attr,
|
||||
'val': val_esc,
|
||||
}
|
||||
query_term = '({attr}=*{val}*)'.format(
|
||||
attr=ldap_attr,
|
||||
val=val_esc,
|
||||
)
|
||||
elif filter_['comparator'] == 'startswith':
|
||||
query_term = u'(%(attr)s=%(val)s*)' % {
|
||||
'attr': ldap_attr,
|
||||
'val': val_esc,
|
||||
}
|
||||
query_term = '({attr}={val}*)'.format(
|
||||
attr=ldap_attr,
|
||||
val=val_esc,
|
||||
)
|
||||
elif filter_['comparator'] == 'endswith':
|
||||
query_term = u'(%(attr)s=*%(val)s)' % {
|
||||
'attr': ldap_attr,
|
||||
'val': val_esc,
|
||||
}
|
||||
query_term = '({attr}=*{val})'.format(
|
||||
attr=ldap_attr,
|
||||
val=val_esc,
|
||||
)
|
||||
else:
|
||||
# It's a filter we don't understand, so let the caller
|
||||
# work out if they need to do something with it.
|
||||
@ -2099,7 +2095,7 @@ class BaseLdap(object):
|
||||
satisfied_filters.append(filter_)
|
||||
|
||||
if filter_list:
|
||||
query = u'(&%s%s)' % (query, ''.join(filter_list))
|
||||
query = '(&{}{})'.format(query, ''.join(filter_list))
|
||||
|
||||
# Remove satisfied filters, then the caller will know remaining filters
|
||||
for filter_ in satisfied_filters:
|
||||
@ -2130,7 +2126,7 @@ class EnabledEmuMixIn(BaseLdap):
DEFAULT_GROUP_MEMBERS_ARE_IDS = False

def __init__(self, conf):
super(EnabledEmuMixIn, self).__init__(conf)
super().__init__(conf)
enabled_emulation = '%s_enabled_emulation' % self.options_name
self.enabled_emulation = getattr(conf.ldap, enabled_emulation)

@ -2175,7 +2171,7 @@ class EnabledEmuMixIn(BaseLdap):
return self._is_member_enabled(member_attr_val, conn)

def _is_member_enabled(self, member_attr_val, conn):
query = '(%s=%s)' % (
query = '({}={})'.format(
self.member_attribute,
ldap.filter.escape_filter_chars(member_attr_val),
)
@ -2221,18 +2217,18 @@ class EnabledEmuMixIn(BaseLdap):
def create(self, values):
if self.enabled_emulation:
enabled_value = values.pop('enabled', True)
ref = super(EnabledEmuMixIn, self).create(values)
ref = super().create(values)
if 'enabled' not in self.attribute_ignore:
if enabled_value:
self._add_enabled(ref['id'])
ref['enabled'] = enabled_value
return ref
else:
return super(EnabledEmuMixIn, self).create(values)
return super().create(values)

def get(self, object_id, ldap_filter=None):
with self.get_connection() as conn:
ref = super(EnabledEmuMixIn, self).get(object_id, ldap_filter)
ref = super().get(object_id, ldap_filter)
if (
'enabled' not in self.attribute_ignore
and self.enabled_emulation
@ -2256,13 +2252,13 @@ class EnabledEmuMixIn(BaseLdap):
)
return obj_list
else:
return super(EnabledEmuMixIn, self).get_all(ldap_filter, hints)
return super().get_all(ldap_filter, hints)

def update(self, object_id, values, old_obj=None):
if 'enabled' not in self.attribute_ignore and self.enabled_emulation:
data = values.copy()
enabled_value = data.pop('enabled', None)
ref = super(EnabledEmuMixIn, self).update(object_id, data, old_obj)
ref = super().update(object_id, data, old_obj)
if enabled_value is not None:
if enabled_value:
self._add_enabled(object_id)
@ -2271,6 +2267,4 @@ class EnabledEmuMixIn(BaseLdap):
ref['enabled'] = enabled_value
return ref
else:
return super(EnabledEmuMixIn, self).update(
object_id, values, old_obj
)
return super().update(object_id, values, old_obj)
|
@ -41,7 +41,7 @@ LDAP_MATCHING_RULE_IN_CHAIN = "1.2.840.113556.1.4.1941"

class Identity(base.IdentityDriverBase):
def __init__(self, conf=None):
super(Identity, self).__init__()
super().__init__()
if conf is None:
self.conf = CONF
else:
@ -256,14 +256,14 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap):
model = models.User

def __init__(self, conf):
super(UserApi, self).__init__(conf)
super().__init__(conf)
self.enabled_mask = conf.ldap.user_enabled_mask
self.enabled_default = conf.ldap.user_enabled_default
self.enabled_invert = conf.ldap.user_enabled_invert
self.enabled_emulation = conf.ldap.user_enabled_emulation

def _ldap_res_to_model(self, res):
obj = super(UserApi, self)._ldap_res_to_model(res)
obj = super()._ldap_res_to_model(res)
if self.enabled_mask != 0:
enabled = int(obj.get('enabled', self.enabled_default))
obj['enabled'] = (enabled & self.enabled_mask) != self.enabled_mask
@ -303,7 +303,7 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap):
values['enabled'] = not orig_enabled
else:
values['enabled'] = self.enabled_default
values = super(UserApi, self).create(values)
values = super().create(values)
if self.enabled_mask or (
self.enabled_invert and not self.enabled_emulation
):
@ -312,7 +312,7 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap):
return values

def get(self, user_id, ldap_filter=None):
obj = super(UserApi, self).get(user_id, ldap_filter=ldap_filter)
obj = super().get(user_id, ldap_filter=ldap_filter)
obj['options'] = {} # options always empty
return obj

@ -324,9 +324,7 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap):
raise self.NotFound(user_id=user_id)

def get_all(self, ldap_filter=None, hints=None):
objs = super(UserApi, self).get_all(
ldap_filter=ldap_filter, hints=hints
)
objs = super().get_all(ldap_filter=ldap_filter, hints=hints)
for obj in objs:
obj['options'] = {} # options always empty
return objs
@ -357,7 +355,7 @@ class UserApi(common_ldap.EnabledEmuMixIn, common_ldap.BaseLdap):
old_obj.pop('options')
if 'options' in values:
values.pop('options')
values = super(UserApi, self).update(user_id, values, old_obj)
values = super().update(user_id, values, old_obj)
values['options'] = {} # options always empty
return values

@ -375,12 +373,12 @@ class GroupApi(common_ldap.BaseLdap):
model = models.Group

def _ldap_res_to_model(self, res):
model = super(GroupApi, self)._ldap_res_to_model(res)
model = super()._ldap_res_to_model(res)
model['dn'] = res[0]
return model

def __init__(self, conf):
super(GroupApi, self).__init__(conf)
super().__init__(conf)
self.group_ad_nesting = conf.ldap.group_ad_nesting
self.member_attribute = (
conf.ldap.group_member_attribute or self.DEFAULT_MEMBER_ATTRIBUTE
@ -392,17 +390,17 @@ class GroupApi(common_ldap.BaseLdap):
data['id'] = uuid.uuid4().hex
if 'description' in data and data['description'] in ['', None]:
data.pop('description')
return super(GroupApi, self).create(data)
return super().create(data)

def update(self, group_id, values):
old_obj = self.get(group_id)
return super(GroupApi, self).update(group_id, values, old_obj)
return super().update(group_id, values, old_obj)

def add_user(self, user_dn, group_id, user_id):
group_ref = self.get(group_id)
group_dn = group_ref['dn']
try:
super(GroupApi, self).add_member(user_dn, group_dn)
super().add_member(user_dn, group_dn)
except exception.Conflict:
raise exception.Conflict(
_('User %(user_id)s is already a member of group %(group_id)s')
@ -413,13 +411,13 @@ class GroupApi(common_ldap.BaseLdap):
"""Return a list of groups for which the user is a member."""
user_dn_esc = ldap.filter.escape_filter_chars(user_dn)
if self.group_ad_nesting:
query = '(%s:%s:=%s)' % (
query = '({}:{}:={})'.format(
self.member_attribute,
LDAP_MATCHING_RULE_IN_CHAIN,
user_dn_esc,
)
else:
query = '(%s=%s)' % (self.member_attribute, user_dn_esc)
query = f'({self.member_attribute}={user_dn_esc})'
return self.get_all(query)

def list_user_groups_filtered(self, user_dn, hints):
@ -429,12 +427,12 @@ class GroupApi(common_ldap.BaseLdap):
# Hardcoded to member as that is how the Matching Rule in Chain
# Mechanisms expects it. The member_attribute might actually be
# member_of elsewhere, so they are not the same.
query = '(member:%s:=%s)' % (
query = '(member:{}:={})'.format(
LDAP_MATCHING_RULE_IN_CHAIN,
user_dn_esc,
)
else:
query = '(%s=%s)' % (self.member_attribute, user_dn_esc)
query = f'({self.member_attribute}={user_dn_esc})'
return self.get_all_filtered(hints, query)

def list_group_users(self, group_id):
|
@ -41,7 +41,7 @@ class Identity(base.IdentityDriverBase):
# config parameter to enable sql to be used as a domain-specific driver.
def __init__(self, conf=None):
self.conf = conf
super(Identity, self).__init__()
super().__init__()

@property
def is_sql(self):
|
@ -258,7 +258,7 @@ class User(sql.ModelBase, sql.ModelDictMixinWithExtras):
return None

def to_dict(self, include_extra_dict=False):
d = super(User, self).to_dict(include_extra_dict=include_extra_dict)
d = super().to_dict(include_extra_dict=include_extra_dict)
if 'default_project_id' in d and d['default_project_id'] is None:
del d['default_project_id']
# NOTE(notmorgan): Eventually it may make sense to drop the empty
@ -291,7 +291,7 @@ class User(sql.ModelBase, sql.ModelDictMixinWithExtras):
if opt_value is not None:
opt.validator(opt_value)
resource_options[opt.option_id] = opt_value
user_obj = super(User, cls).from_dict(new_dict)
user_obj = super().from_dict(new_dict)
setattr(user_obj, '_resource_options', resource_options)
return user_obj

|
@ -605,7 +605,7 @@ class Manager(manager.Manager):
_GROUP = 'group'

def __init__(self):
super(Manager, self).__init__(CONF.identity.driver)
super().__init__(CONF.identity.driver)
self.domain_configs = DomainConfigs()
notifications.register_event_callback(
notifications.ACTIONS.internal,
@ -1785,7 +1785,7 @@ class MappingManager(manager.Manager):
_provides_api = 'id_mapping_api'

def __init__(self):
super(MappingManager, self).__init__(CONF.identity_mapping.driver)
super().__init__(CONF.identity_mapping.driver)

@MEMOIZE_ID_MAPPING
def _get_public_id(self, domain_id, local_id, entity_type):
@ -1851,4 +1851,4 @@ class ShadowUsersManager(manager.Manager):
def __init__(self):
shadow_driver = CONF.shadow_users.driver

super(ShadowUsersManager, self).__init__(shadow_driver)
super().__init__(shadow_driver)
|
@ -31,10 +31,10 @@ class Manager(manager.Manager):
_provides_api = 'id_generator_api'

def __init__(self):
super(Manager, self).__init__(CONF.identity_mapping.generator)
super().__init__(CONF.identity_mapping.generator)


class IDGenerator(object, metaclass=abc.ABCMeta):
class IDGenerator(metaclass=abc.ABCMeta):
"""Interface description for an ID Generator provider."""

@abc.abstractmethod
|
@ -18,9 +18,7 @@ from keystone.common import provider_api
from keystone import exception


class MappingDriverBase(
provider_api.ProviderAPIMixin, object, metaclass=abc.ABCMeta
):
class MappingDriverBase(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
"""Interface description for an ID Mapping driver."""

@abc.abstractmethod
|
@ -13,6 +13,6 @@
# under the License.


class EntityType(object):
class EntityType:
USER = 'user'
GROUP = 'group'
|
@ -45,7 +45,7 @@ def federated_objects_to_list(fed_ref):
|
||||
return list(fed.values())
|
||||
|
||||
|
||||
class ShadowUsersDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class ShadowUsersDriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface description for an Shadow Users driver."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -22,7 +22,7 @@ from keystone import exception
|
||||
CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class UnifiedLimitDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class UnifiedLimitDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
def _get_list_limit(self):
|
||||
return CONF.unified_limit.list_limit or CONF.list_limit
|
||||
|
@ -49,7 +49,7 @@ class RegisteredLimitModel(sql.ModelBase, sql.ModelDictMixin):
|
||||
description = sql.Column(sql.Text())
|
||||
|
||||
def to_dict(self):
|
||||
ref = super(RegisteredLimitModel, self).to_dict()
|
||||
ref = super().to_dict()
|
||||
ref.pop('internal_id')
|
||||
return ref
|
||||
|
||||
@ -112,7 +112,7 @@ class LimitModel(sql.ModelBase, sql.ModelDictMixin):
|
||||
return RegisteredLimitModel.resource_name
|
||||
|
||||
def to_dict(self):
|
||||
ref = super(LimitModel, self).to_dict()
|
||||
ref = super().to_dict()
|
||||
if self.registered_limit:
|
||||
ref['service_id'] = self.registered_limit.service_id
|
||||
ref['region_id'] = self.registered_limit.region_id
|
||||
|
@ -34,7 +34,7 @@ class Manager(manager.Manager):
|
||||
|
||||
def __init__(self):
|
||||
unified_limit_driver = CONF.unified_limit.driver
|
||||
super(Manager, self).__init__(unified_limit_driver)
|
||||
super().__init__(unified_limit_driver)
|
||||
|
||||
self.enforcement_model = base.load_driver(
|
||||
CONF.unified_limit.enforcement_model
|
||||
|
@ -31,7 +31,7 @@ def load_driver(driver_name, *args):
|
||||
raise ImportError(msg % {'name': driver_name, 'namespace': namespace})
|
||||
|
||||
|
||||
class ModelBase(object, metaclass=abc.ABCMeta):
|
||||
class ModelBase(metaclass=abc.ABCMeta):
|
||||
"""Interface for a limit model driver."""
|
||||
|
||||
NAME = None
|
||||
|
@ -26,7 +26,7 @@ LOG = log.getLogger(__name__)
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class ReceiptModel(object):
|
||||
class ReceiptModel:
|
||||
"""An object that represents a receipt emitted by keystone.
|
||||
|
||||
This is a queryable object that other parts of keystone can use to reason
|
||||
@ -121,7 +121,7 @@ class ReceiptModel(object):
|
||||
self.issued_at = issued_at
|
||||
|
||||
|
||||
class _ReceiptModelHandler(object):
|
||||
class _ReceiptModelHandler:
|
||||
identity = 125
|
||||
handles = (ReceiptModel,)
|
||||
|
||||
|
@ -80,7 +80,7 @@ def blank_token_data(issued_at):
|
||||
return token_data
|
||||
|
||||
|
||||
class RevokeEvent(object):
|
||||
class RevokeEvent:
|
||||
def __init__(self, **kwargs):
|
||||
for k in REVOKE_KEYS:
|
||||
v = kwargs.get(k)
|
||||
@ -287,7 +287,7 @@ def build_token_values(token):
|
||||
return token_values
|
||||
|
||||
|
||||
class _RevokeEventHandler(object):
|
||||
class _RevokeEventHandler:
|
||||
# NOTE(morganfainberg): There needs to be reserved "registry" entries set
|
||||
# in oslo_serialization for application-specific handlers. We picked 127
|
||||
# here since it's waaaaaay far out before oslo_serialization will use it.
|
||||
|
@ -33,7 +33,7 @@ VERSIONS = frozenset([V3])
|
||||
ACCESS_RULES_MIN_VERSION = 1.0
|
||||
|
||||
|
||||
class TokenModel(object):
|
||||
class TokenModel:
|
||||
"""An object that represents a token emitted by keystone.
|
||||
|
||||
This is a queryable object that other parts of keystone can use to reason
|
||||
@ -314,9 +314,9 @@ class TokenModel(object):
|
||||
effective_trust_roles = PROVIDERS.assignment_api.add_implied_roles(
|
||||
trust_roles
|
||||
)
|
||||
effective_trust_role_ids = set(
|
||||
[r['role_id'] for r in effective_trust_roles]
|
||||
)
|
||||
effective_trust_role_ids = {
|
||||
r['role_id'] for r in effective_trust_roles
|
||||
}
|
||||
|
||||
current_effective_trustor_roles = (
|
||||
PROVIDERS.assignment_api.get_roles_for_trustor_and_project(
|
||||
@ -436,7 +436,7 @@ class TokenModel(object):
|
||||
domain_id=self.domain_id,
|
||||
effective=True,
|
||||
)
|
||||
user_roles = list(set([x['role_id'] for x in assignment_list]))
|
||||
user_roles = list({x['role_id'] for x in assignment_list})
|
||||
|
||||
for role in app_cred_roles:
|
||||
if role['id'] in user_roles:
|
||||
@ -561,9 +561,9 @@ class TokenModel(object):
|
||||
effective_trust_roles = PROVIDERS.assignment_api.add_implied_roles(
|
||||
refs
|
||||
)
|
||||
effective_trust_role_ids = set(
|
||||
[r['role_id'] for r in effective_trust_roles]
|
||||
)
|
||||
effective_trust_role_ids = {
|
||||
r['role_id'] for r in effective_trust_roles
|
||||
}
|
||||
current_effective_trustor_roles = (
|
||||
PROVIDERS.assignment_api.get_roles_for_trustor_and_project(
|
||||
self.trustor['id'], self.trust.get('project_id')
|
||||
@ -603,7 +603,7 @@ class TokenModel(object):
|
||||
self.issued_at = issued_at
|
||||
|
||||
|
||||
class _TokenModelHandler(object):
|
||||
class _TokenModelHandler:
|
||||
identity = 126
|
||||
handles = (TokenModel,)
|
||||
|
||||
|
@ -120,7 +120,7 @@ def build_audit_initiator():
|
||||
return initiator
|
||||
|
||||
|
||||
class Audit(object):
|
||||
class Audit:
|
||||
"""Namespace for audit notification functions.
|
||||
|
||||
This is a namespace object to contain all of the direct notification
|
||||
@ -567,8 +567,8 @@ def _create_cadf_payload(
|
||||
target = resource.Resource(typeURI=target_uri, id=resource_id)
|
||||
|
||||
audit_kwargs = {'resource_info': resource_id}
|
||||
cadf_action = '%s.%s' % (operation, resource_type)
|
||||
event_type = '%s.%s.%s' % (SERVICE, resource_type, operation)
|
||||
cadf_action = f'{operation}.{resource_type}'
|
||||
event_type = f'{SERVICE}.{resource_type}.{operation}'
|
||||
|
||||
_send_audit_notification(
|
||||
cadf_action,
|
||||
@ -577,7 +577,7 @@ def _create_cadf_payload(
|
||||
target,
|
||||
event_type,
|
||||
reason=reason,
|
||||
**audit_kwargs
|
||||
**audit_kwargs,
|
||||
)
|
||||
|
||||
|
||||
@ -625,11 +625,11 @@ def _send_notification(
|
||||
notifier = _get_notifier()
|
||||
if notifier:
|
||||
context = {}
|
||||
event_type = '%(service)s.%(resource_type)s.%(operation)s' % {
|
||||
'service': SERVICE,
|
||||
'resource_type': resource_type,
|
||||
'operation': operation,
|
||||
}
|
||||
event_type = '{service}.{resource_type}.{operation}'.format(
|
||||
service=SERVICE,
|
||||
resource_type=resource_type,
|
||||
operation=operation,
|
||||
)
|
||||
if _check_notification_opt_out(event_type, outcome=None):
|
||||
return
|
||||
try:
|
||||
@ -685,7 +685,7 @@ def _get_request_audit_info(context, user_id=None):
|
||||
return initiator
|
||||
|
||||
|
||||
class CadfNotificationWrapper(object):
|
||||
class CadfNotificationWrapper:
|
||||
"""Send CADF event notifications for various methods.
|
||||
|
||||
This function is only used for Authentication events. Its ``action`` and
|
||||
@ -703,7 +703,7 @@ class CadfNotificationWrapper(object):
|
||||
|
||||
def __init__(self, operation):
|
||||
self.action = operation
|
||||
self.event_type = '%s.%s' % (SERVICE, operation)
|
||||
self.event_type = f'{SERVICE}.{operation}'
|
||||
|
||||
def __call__(self, f):
|
||||
@functools.wraps(f)
|
||||
@ -754,7 +754,7 @@ class CadfNotificationWrapper(object):
|
||||
return wrapper
|
||||
|
||||
|
||||
class CadfRoleAssignmentNotificationWrapper(object):
|
||||
class CadfRoleAssignmentNotificationWrapper:
|
||||
"""Send CADF notifications for ``role_assignment`` methods.
|
||||
|
||||
This function is only used for role assignment events. Its ``action`` and
|
||||
@ -773,8 +773,8 @@ class CadfRoleAssignmentNotificationWrapper(object):
|
||||
ROLE_ASSIGNMENT = 'role_assignment'
|
||||
|
||||
def __init__(self, operation):
|
||||
self.action = '%s.%s' % (operation, self.ROLE_ASSIGNMENT)
|
||||
self.event_type = '%s.%s.%s' % (
|
||||
self.action = f'{operation}.{self.ROLE_ASSIGNMENT}'
|
||||
self.event_type = '{}.{}.{}'.format(
|
||||
SERVICE,
|
||||
self.ROLE_ASSIGNMENT,
|
||||
operation,
|
||||
@ -849,7 +849,7 @@ class CadfRoleAssignmentNotificationWrapper(object):
|
||||
taxonomy.OUTCOME_FAILURE,
|
||||
target,
|
||||
self.event_type,
|
||||
**audit_kwargs
|
||||
**audit_kwargs,
|
||||
)
|
||||
raise
|
||||
else:
|
||||
@ -859,7 +859,7 @@ class CadfRoleAssignmentNotificationWrapper(object):
|
||||
taxonomy.OUTCOME_SUCCESS,
|
||||
target,
|
||||
self.event_type,
|
||||
**audit_kwargs
|
||||
**audit_kwargs,
|
||||
)
|
||||
return result
|
||||
|
||||
@ -900,11 +900,11 @@ def send_saml_audit_notification(
|
||||
groups=group_ids,
|
||||
)
|
||||
initiator.credential = cred
|
||||
event_type = '%s.%s' % (SERVICE, action)
|
||||
event_type = f'{SERVICE}.{action}'
|
||||
_send_audit_notification(action, initiator, outcome, target, event_type)
|
||||
|
||||
|
||||
class _CatalogHelperObj(provider_api.ProviderAPIMixin, object):
|
||||
class _CatalogHelperObj(provider_api.ProviderAPIMixin):
|
||||
"""A helper object to allow lookups of identity service id."""
|
||||
|
||||
|
||||
|
@ -53,7 +53,7 @@ def filter_consumer(consumer_ref):
|
||||
return consumer_ref
|
||||
|
||||
|
||||
class Oauth1DriverBase(object, metaclass=abc.ABCMeta):
|
||||
class Oauth1DriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface description for an OAuth1 driver."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -131,7 +131,7 @@ class OAuth1(base.Oauth1DriverBase):
|
||||
def _delete_request_tokens(self, session, consumer_id):
|
||||
q = session.query(RequestToken)
|
||||
req_tokens = q.filter_by(consumer_id=consumer_id)
|
||||
req_tokens_list = set([x.id for x in req_tokens])
|
||||
req_tokens_list = {x.id for x in req_tokens}
|
||||
for token_id in req_tokens_list:
|
||||
token_ref = self._get_request_token(session, token_id)
|
||||
session.delete(token_ref)
|
||||
@ -139,7 +139,7 @@ class OAuth1(base.Oauth1DriverBase):
|
||||
def _delete_access_tokens(self, session, consumer_id):
|
||||
q = session.query(AccessToken)
|
||||
acc_tokens = q.filter_by(consumer_id=consumer_id)
|
||||
acc_tokens_list = set([x.id for x in acc_tokens])
|
||||
acc_tokens_list = {x.id for x in acc_tokens}
|
||||
for token_id in acc_tokens_list:
|
||||
token_ref = self._get_access_token(session, token_id)
|
||||
session.delete(token_ref)
|
||||
|
@ -37,7 +37,7 @@ RequestTokenEndpoint = oauth1.RequestTokenEndpoint
|
||||
oRequest = oauthlib.common.Request
|
||||
|
||||
|
||||
class Token(object):
|
||||
class Token:
|
||||
def __init__(self, key, secret):
|
||||
self.key = key
|
||||
self.secret = secret
|
||||
@ -130,7 +130,7 @@ class Manager(manager.Manager):
|
||||
_CONSUMER = "OS-OAUTH1:consumer"
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.oauth1.driver)
|
||||
super().__init__(CONF.oauth1.driver)
|
||||
|
||||
def create_consumer(self, consumer_ref, initiator=None):
|
||||
consumer_ref = consumer_ref.copy()
|
||||
|
@ -19,7 +19,7 @@ from keystone import exception
|
||||
CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class PolicyDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class PolicyDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
def _get_list_limit(self):
|
||||
return CONF.policy.list_limit or CONF.list_limit
|
||||
|
@ -37,7 +37,7 @@ class Manager(manager.Manager):
|
||||
_POLICY = 'policy'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.policy.driver)
|
||||
super().__init__(CONF.policy.driver)
|
||||
|
||||
def create_policy(self, policy_id, policy, initiator=None):
|
||||
ref = self.driver.create_policy(policy_id, policy)
|
||||
|
@ -66,7 +66,7 @@ class Manager(manager.Manager):
|
||||
_provides_api = 'receipt_provider_api'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.receipt.provider)
|
||||
super().__init__(CONF.receipt.provider)
|
||||
self._register_callback_listeners()
|
||||
|
||||
def _register_callback_listeners(self):
|
||||
|
@ -17,7 +17,7 @@ import abc
|
||||
from keystone import exception
|
||||
|
||||
|
||||
class Provider(object, metaclass=abc.ABCMeta):
|
||||
class Provider(metaclass=abc.ABCMeta):
|
||||
"""Interface description for a Receipt provider."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -28,7 +28,7 @@ CONF = keystone.conf.CONF
|
||||
|
||||
class Provider(base.Provider):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(Provider, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# NOTE(lbragstad): We add these checks here because if the fernet
|
||||
# provider is going to be used and either the `key_repository` is empty
|
||||
|
@ -39,7 +39,7 @@ TIMESTAMP_START = 1
|
||||
TIMESTAMP_END = 9
|
||||
|
||||
|
||||
class ReceiptFormatter(object):
|
||||
class ReceiptFormatter:
|
||||
"""Packs and unpacks payloads into receipts for transport."""
|
||||
|
||||
@property
|
||||
@ -181,7 +181,7 @@ class ReceiptFormatter(object):
|
||||
return (user_id, methods, issued_at, expires_at)
|
||||
|
||||
|
||||
class ReceiptPayload(object):
|
||||
class ReceiptPayload:
|
||||
|
||||
@classmethod
|
||||
def assemble(cls, user_id, methods, expires_at):
|
||||
|
@ -37,7 +37,7 @@ def get_project_from_domain(domain_ref):
|
||||
NULL_DOMAIN_ID = '<<keystone.domain.root>>'
|
||||
|
||||
|
||||
class ResourceDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class ResourceDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
def _get_list_limit(self):
|
||||
return CONF.resource.list_limit or CONF.list_limit
|
||||
|
@ -136,7 +136,7 @@ class Resource(base.ResourceDriverBase):
|
||||
with sql.session_for_read() as session:
|
||||
children = self._get_children(session, [project_id])
|
||||
subtree = []
|
||||
examined = set([project_id])
|
||||
examined = {project_id}
|
||||
while children:
|
||||
children_ids = set()
|
||||
for ref in children:
|
||||
|
@ -28,7 +28,7 @@ class Project(sql.ModelBase, sql.ModelDictMixinWithExtras):
|
||||
# to represent null, as defined in NULL_DOMAIN_ID above.
|
||||
|
||||
def to_dict(self, include_extra_dict=False):
|
||||
d = super(Project, self).to_dict(include_extra_dict=include_extra_dict)
|
||||
d = super().to_dict(include_extra_dict=include_extra_dict)
|
||||
if d['domain_id'] == base.NULL_DOMAIN_ID:
|
||||
d['domain_id'] = None
|
||||
# NOTE(notmorgan): Eventually it may make sense to drop the empty
|
||||
@ -49,7 +49,7 @@ class Project(sql.ModelBase, sql.ModelDictMixinWithExtras):
|
||||
if opt_value is not None:
|
||||
opt.validator(opt_value)
|
||||
resource_options[opt.option_id] = opt_value
|
||||
project_obj = super(Project, cls).from_dict(new_dict)
|
||||
project_obj = super().from_dict(new_dict)
|
||||
setattr(project_obj, '_resource_options', resource_options)
|
||||
return project_obj
|
||||
|
||||
@ -118,7 +118,7 @@ class Project(sql.ModelBase, sql.ModelDictMixinWithExtras):
|
||||
class ProjectTag(sql.ModelBase, sql.ModelDictMixin):
|
||||
|
||||
def to_dict(self):
|
||||
d = super(ProjectTag, self).to_dict()
|
||||
d = super().to_dict()
|
||||
return d
|
||||
|
||||
__tablename__ = 'project_tag'
|
||||
|
@ -17,7 +17,7 @@ import abc
|
||||
from keystone import exception
|
||||
|
||||
|
||||
class DomainConfigDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class DomainConfigDriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface description for a Domain Config driver."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -24,7 +24,7 @@ class WhiteListedConfig(sql.ModelBase, sql.ModelDictMixin):
|
||||
value = sql.Column(sql.JsonBlob(), nullable=False)
|
||||
|
||||
def to_dict(self):
|
||||
d = super(WhiteListedConfig, self).to_dict()
|
||||
d = super().to_dict()
|
||||
d.pop('domain_id')
|
||||
return d
|
||||
|
||||
@ -37,7 +37,7 @@ class SensitiveConfig(sql.ModelBase, sql.ModelDictMixin):
|
||||
value = sql.Column(sql.JsonBlob(), nullable=False)
|
||||
|
||||
def to_dict(self):
|
||||
d = super(SensitiveConfig, self).to_dict()
|
||||
d = super().to_dict()
|
||||
d.pop('domain_id')
|
||||
return d
|
||||
|
||||
|
@ -54,7 +54,7 @@ class Manager(manager.Manager):
|
||||
|
||||
def __init__(self):
|
||||
resource_driver = CONF.resource.driver
|
||||
super(Manager, self).__init__(resource_driver)
|
||||
super().__init__(resource_driver)
|
||||
|
||||
def _get_hierarchy_depth(self, parents_list):
|
||||
return len(parents_list) + 1
|
||||
@ -625,7 +625,7 @@ class Manager(manager.Manager):
|
||||
user_projects = PROVIDERS.assignment_api.list_projects_for_user(
|
||||
user_id
|
||||
)
|
||||
user_projects_ids = set([proj['id'] for proj in user_projects])
|
||||
user_projects_ids = {proj['id'] for proj in user_projects}
|
||||
# Keep only the projects present in user_projects
|
||||
return [
|
||||
proj for proj in projects_list if proj['id'] in user_projects_ids
|
||||
@ -1229,7 +1229,7 @@ class DomainConfigManager(manager.Manager):
|
||||
sensitive_options = {'identity': [], 'ldap': ['password']}
|
||||
|
||||
def __init__(self):
|
||||
super(DomainConfigManager, self).__init__(CONF.domain_config.driver)
|
||||
super().__init__(CONF.domain_config.driver)
|
||||
|
||||
def _assert_valid_config(self, config):
|
||||
"""Ensure the options in the config are valid.
|
||||
|
@ -32,7 +32,7 @@ def revoked_before_cutoff_time():
|
||||
return oldest
|
||||
|
||||
|
||||
class RevokeDriverBase(object, metaclass=abc.ABCMeta):
|
||||
class RevokeDriverBase(metaclass=abc.ABCMeta):
|
||||
"""Interface for recording and reporting revocation events."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -44,7 +44,7 @@ class Manager(manager.Manager):
|
||||
_provides_api = 'revoke_api'
|
||||
|
||||
def __init__(self):
|
||||
super(Manager, self).__init__(CONF.revoke.driver)
|
||||
super().__init__(CONF.revoke.driver)
|
||||
self._register_listeners()
|
||||
self.model = revoke_model
|
||||
|
||||
|
@ -180,7 +180,7 @@ def _remove_content_type_on_204(resp):
|
||||
return resp
|
||||
|
||||
|
||||
class APIBase(object, metaclass=abc.ABCMeta):
|
||||
class APIBase(metaclass=abc.ABCMeta):
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
@ -373,12 +373,12 @@ class APIBase(object, metaclass=abc.ABCMeta):
|
||||
# sure to use the correct path-key for ID.
|
||||
member_id_key = getattr(r, '_id_path_param_name_override')
|
||||
else:
|
||||
member_id_key = '%(member_key)s_id' % {'member_key': m_key}
|
||||
member_id_key = f'{m_key}_id'
|
||||
|
||||
entity_path = '/%(collection)s/<string:%(member)s>' % {
|
||||
'collection': c_key,
|
||||
'member': member_id_key,
|
||||
}
|
||||
entity_path = '/{collection}/<string:{member}>'.format(
|
||||
collection=c_key,
|
||||
member=member_id_key,
|
||||
)
|
||||
# NOTE(morgan): The json-home form of the entity path is different
|
||||
# from the flask-url routing form. Must also include the prefix
|
||||
jh_e_path = _URL_SUBST.sub(
|
||||
@ -419,10 +419,10 @@ class APIBase(object, metaclass=abc.ABCMeta):
|
||||
)
|
||||
# NOTE(morgan): Add the prefix explicitly for JSON Home documents
|
||||
# to the collection path.
|
||||
href_val = '%(pfx)s%(collection_path)s' % {
|
||||
'pfx': self._api_url_prefix,
|
||||
'collection_path': collection_path,
|
||||
}
|
||||
href_val = '{pfx}{collection_path}'.format(
|
||||
pfx=self._api_url_prefix,
|
||||
collection_path=collection_path,
|
||||
)
|
||||
|
||||
# If additional parameters exist in the URL, add them to the
|
||||
# href-vars dict.
|
||||
@ -522,10 +522,10 @@ class APIBase(object, metaclass=abc.ABCMeta):
|
||||
resource_data = {}
|
||||
# NOTE(morgan): JSON Home form of the URL is different
|
||||
# from FLASK, do the conversion here.
|
||||
conv_url = '%(pfx)s/%(url)s' % {
|
||||
'url': _URL_SUBST.sub('{\\1}', r.url).lstrip('/'),
|
||||
'pfx': self._api_url_prefix,
|
||||
}
|
||||
conv_url = '{pfx}/{url}'.format(
|
||||
url=_URL_SUBST.sub('{\\1}', r.url).lstrip('/'),
|
||||
pfx=self._api_url_prefix,
|
||||
)
|
||||
|
||||
if r.json_home_data.path_vars:
|
||||
resource_data['href-template'] = conv_url
|
||||
@ -650,7 +650,7 @@ class APIBase(object, metaclass=abc.ABCMeta):
|
||||
return inst
|
||||
|
||||
|
||||
class _AttributeRaisesError(object):
|
||||
class _AttributeRaisesError:
|
||||
# NOTE(morgan): This is a special case class that exists to effectively
|
||||
# create a @classproperty style function. We use __get__ to raise the
|
||||
# exception.
|
||||
|
@ -50,7 +50,7 @@ CONF = keystone.conf.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
JSON_ENCODE_CONTENT_TYPES = set(['application/json', 'application/json-home'])
|
||||
JSON_ENCODE_CONTENT_TYPES = {'application/json', 'application/json-home'}
|
||||
|
||||
# minimum access rules support
|
||||
ACCESS_RULES_MIN_VERSION = token_model.ACCESS_RULES_MIN_VERSION
|
||||
@ -240,9 +240,7 @@ class AuthContextMiddleware(
|
||||
kwargs_to_fetch_token = True
|
||||
|
||||
def __init__(self, app):
|
||||
super(AuthContextMiddleware, self).__init__(
|
||||
app, log=LOG, service_type='identity'
|
||||
)
|
||||
super().__init__(app, log=LOG, service_type='identity')
|
||||
self.token = None
|
||||
|
||||
def fetch_token(self, token, **kwargs):
|
||||
@ -364,7 +362,7 @@ class AuthContextMiddleware(
|
||||
request.environ[CONTEXT_ENV] = context_env
|
||||
|
||||
if not context_env.get('is_admin', False):
|
||||
resp = super(AuthContextMiddleware, self).process_request(request)
|
||||
resp = super().process_request(request)
|
||||
if resp:
|
||||
return resp
|
||||
if (
|
||||
|
@ -13,7 +13,7 @@
|
||||
# Flask Native URL Normalizing Middleware
|
||||
|
||||
|
||||
class URLNormalizingMiddleware(object):
|
||||
class URLNormalizingMiddleware:
|
||||
"""Middleware filter to handle URL normalization."""
|
||||
|
||||
# NOTE(morgan): This must be a middleware as changing 'PATH_INFO' after
|
||||
|
@ -13,7 +13,7 @@
|
||||
from keystone.i18n import _
|
||||
|
||||
|
||||
class AuthTestMixin(object):
|
||||
class AuthTestMixin:
|
||||
"""To hold auth building helper functions."""
|
||||
|
||||
def _build_auth_scope(
|
||||
|
@ -45,7 +45,7 @@ class BaseTestCase(testtools.TestCase, common_auth.AuthTestMixin):
|
||||
self.project_name = os.environ.get('KSTEST_PROJECT_NAME')
|
||||
self.project_domain_id = os.environ.get('KSTEST_PROJECT_DOMAIN_ID')
|
||||
|
||||
super(BaseTestCase, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
def _http_headers(self, token=None):
|
||||
headers = {'content-type': 'application/json'}
|
||||
|
@ -93,7 +93,7 @@ class CheckForMutableDefaultArgs(BaseASTChecker):
|
||||
if isinstance(arg, self.MUTABLES):
|
||||
self.add_error(arg)
|
||||
|
||||
super(CheckForMutableDefaultArgs, self).generic_visit(node)
|
||||
super().generic_visit(node)
|
||||
|
||||
|
||||
@core.flake8ext
|
||||
@ -139,7 +139,7 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
}
|
||||
|
||||
def __init__(self, tree, filename):
|
||||
super(CheckForTranslationIssues, self).__init__(tree, filename)
|
||||
super().__init__(tree, filename)
|
||||
|
||||
self.logger_names = []
|
||||
self.logger_module_names = []
|
||||
@ -171,13 +171,13 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
def visit_Import(self, node):
|
||||
for alias in node.names:
|
||||
self._filter_imports(alias.name, alias)
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
def visit_ImportFrom(self, node):
|
||||
for alias in node.names:
|
||||
full_name = '%s.%s' % (node.module, alias.name)
|
||||
full_name = f'{node.module}.{alias.name}'
|
||||
self._filter_imports(full_name, alias)
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
def _find_name(self, node):
|
||||
"""Return the fully qualified name or a Name or Attribute."""
|
||||
@ -220,7 +220,7 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
node.targets[0], attr_node_types
|
||||
):
|
||||
# say no to: "x, y = ..."
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
target_name = self._find_name(node.targets[0])
|
||||
|
||||
@ -239,7 +239,7 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
if not isinstance(node.value, ast.Call):
|
||||
# node.value must be a call to getLogger
|
||||
self.assignments.pop(target_name, None)
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
# is this a call to an i18n function?
|
||||
if (
|
||||
@ -247,14 +247,14 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
and node.value.func.id in self.i18n_names
|
||||
):
|
||||
self.assignments[target_name] = node.value.func.id
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
if not isinstance(node.value.func, ast.Attribute) or not isinstance(
|
||||
node.value.func.value, attr_node_types
|
||||
):
|
||||
# function must be an attribute on an object like
|
||||
# logging.getLogger
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
object_name = self._find_name(node.value.func.value)
|
||||
func_name = node.value.func.attr
|
||||
@ -265,7 +265,7 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
):
|
||||
self.logger_names.append(target_name)
|
||||
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
def visit_Call(self, node):
|
||||
"""Look for the 'LOG.*' calls."""
|
||||
@ -278,9 +278,7 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
obj_name = self._find_name(node.func.value)
|
||||
method_name = node.func.attr
|
||||
else: # could be Subscript, Call or many more
|
||||
return super(CheckForTranslationIssues, self).generic_visit(
|
||||
node
|
||||
)
|
||||
return super().generic_visit(node)
|
||||
|
||||
# if dealing with a logger the method can't be "warn"
|
||||
if obj_name in self.logger_names and method_name == 'warn':
|
||||
@ -292,19 +290,15 @@ class CheckForTranslationIssues(BaseASTChecker):
|
||||
obj_name not in self.logger_names
|
||||
or method_name not in self.TRANS_HELPER_MAP
|
||||
):
|
||||
return super(CheckForTranslationIssues, self).generic_visit(
|
||||
node
|
||||
)
|
||||
return super().generic_visit(node)
|
||||
|
||||
# the call must have arguments
|
||||
if not node.args:
|
||||
return super(CheckForTranslationIssues, self).generic_visit(
|
||||
node
|
||||
)
|
||||
return super().generic_visit(node)
|
||||
|
||||
self._process_log_messages(node)
|
||||
|
||||
return super(CheckForTranslationIssues, self).generic_visit(node)
|
||||
return super().generic_visit(node)
|
||||
|
||||
def _process_log_messages(self, node):
|
||||
msg = node.args[0] # first arg to a logging method is the msg
|
||||
|
@ -26,7 +26,7 @@ CONF = keystone.conf.CONF
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class _UserAccessRuleTests(object):
|
||||
class _UserAccessRuleTests:
|
||||
"""Test cases for anyone that has a valid user token."""
|
||||
|
||||
def test_user_can_get_their_access_rules(self):
|
||||
@ -50,7 +50,7 @@ class _UserAccessRuleTests(object):
|
||||
app_cred
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
self.user_id,
|
||||
app_cred['access_rules'][0]['id'],
|
||||
)
|
||||
@ -106,14 +106,14 @@ class _UserAccessRuleTests(object):
|
||||
app_cred['id']
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
self.user_id,
|
||||
access_rule_id,
|
||||
)
|
||||
c.delete(path, headers=self.headers)
|
||||
|
||||
|
||||
class _ProjectUsersTests(object):
|
||||
class _ProjectUsersTests:
|
||||
"""Users who have project role authorization observe the same behavior."""
|
||||
|
||||
def test_user_cannot_get_access_rules_for_other_users(self):
|
||||
@ -149,7 +149,7 @@ class _ProjectUsersTests(object):
|
||||
app_cred
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -253,7 +253,7 @@ class _ProjectUsersTests(object):
|
||||
app_cred['id']
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -275,7 +275,7 @@ class _ProjectUsersTests(object):
|
||||
)
|
||||
|
||||
|
||||
class _SystemUserAccessRuleTests(object):
|
||||
class _SystemUserAccessRuleTests:
|
||||
"""Tests that are common across all system users."""
|
||||
|
||||
def test_user_can_list_access_rules_for_other_users(self):
|
||||
@ -335,7 +335,7 @@ class SystemReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -397,7 +397,7 @@ class SystemReaderTests(
|
||||
app_cred['id']
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -426,7 +426,7 @@ class SystemMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -488,7 +488,7 @@ class SystemMemberTests(
|
||||
app_cred['id']
|
||||
)
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -499,7 +499,7 @@ class SystemMemberTests(
|
||||
)
|
||||
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -528,7 +528,7 @@ class SystemAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -585,7 +585,7 @@ class SystemAdminTests(
|
||||
)
|
||||
|
||||
with self.test_client() as c:
|
||||
path = '/v3/users/%s/access_rules/%s' % (
|
||||
path = '/v3/users/{}/access_rules/{}'.format(
|
||||
user['id'],
|
||||
access_rule_id,
|
||||
)
|
||||
@ -611,7 +611,7 @@ class ProjectReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -654,7 +654,7 @@ class ProjectMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -697,7 +697,7 @@ class ProjectAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
|
@ -67,7 +67,7 @@ class _TestAppCredBase(base_classes.TestCaseWithBootstrap):
|
||||
return app_cred_data
|
||||
|
||||
def setUp(self):
|
||||
super(_TestAppCredBase, self).setUp()
|
||||
super().setUp()
|
||||
|
||||
# create a user and project for app cred testing
|
||||
new_user_ref = unit.new_user_ref(
|
||||
@ -122,7 +122,7 @@ class _TestAppCredBase(base_classes.TestCaseWithBootstrap):
|
||||
f.write(jsonutils.dumps(overridden_policies))
|
||||
|
||||
|
||||
class _DomainAndProjectUserTests(object):
|
||||
class _DomainAndProjectUserTests:
|
||||
"""Domain and project user tests.
|
||||
|
||||
Domain and project users should not be able to manage application
|
||||
@ -208,7 +208,7 @@ class _DomainAndProjectUserTests(object):
|
||||
)
|
||||
|
||||
|
||||
class _SystemUserAndOwnerTests(object):
|
||||
class _SystemUserAndOwnerTests:
|
||||
"""Common default functionality for all system users and owner."""
|
||||
|
||||
def test_user_can_list_application_credentials(self):
|
||||
@ -291,7 +291,7 @@ class SystemReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -328,7 +328,7 @@ class SystemMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -365,7 +365,7 @@ class SystemAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -393,7 +393,7 @@ class OwnerTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(OwnerTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -509,7 +509,7 @@ class DomainAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(DomainAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -552,7 +552,7 @@ class DomainReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(DomainReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -595,7 +595,7 @@ class DomainMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(DomainMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -638,7 +638,7 @@ class ProjectAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -682,7 +682,7 @@ class ProjectReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
@ -726,7 +726,7 @@ class ProjectMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
|
@ -29,7 +29,7 @@ CONF = keystone.conf.CONF
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class _AssignmentTestUtilities(object):
|
||||
class _AssignmentTestUtilities:
|
||||
"""Useful utilities for setting up test assignments and assertions."""
|
||||
|
||||
def _setup_test_role_assignments(self):
|
||||
@ -119,7 +119,7 @@ class _AssignmentTestUtilities(object):
|
||||
return assignments
|
||||
|
||||
|
||||
class _SystemUserTests(object):
|
||||
class _SystemUserTests:
|
||||
"""Common functionality for system users regardless of default role."""
|
||||
|
||||
def test_user_can_list_all_role_assignments_in_the_deployment(self):
|
||||
@ -765,7 +765,7 @@ class _SystemUserTests(object):
|
||||
self.assertIn(assignment, expected)
|
||||
|
||||
|
||||
class _DomainUserTests(object):
|
||||
class _DomainUserTests:
|
||||
"""Common functionality for domain users."""
|
||||
|
||||
def _setup_test_role_assignments_for_domain(self):
|
||||
@ -1104,7 +1104,7 @@ class _DomainUserTests(object):
|
||||
)
|
||||
|
||||
|
||||
class _ProjectUserTests(object):
|
||||
class _ProjectUserTests:
|
||||
|
||||
def test_user_cannot_list_all_assignments_in_their_project(self):
|
||||
with self.test_client() as c:
|
||||
@ -1195,7 +1195,7 @@ class _ProjectUserTests(object):
|
||||
)
|
||||
|
||||
|
||||
class _ProjectReaderMemberTests(object):
|
||||
class _ProjectReaderMemberTests:
|
||||
def test_user_cannot_list_assignments_for_subtree(self):
|
||||
user = PROVIDERS.identity_api.create_user(
|
||||
unit.new_user_ref(domain_id=self.domain_id)
|
||||
@ -1230,7 +1230,7 @@ class SystemReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1273,7 +1273,7 @@ class SystemMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1316,7 +1316,7 @@ class SystemAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1346,7 +1346,7 @@ class DomainReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(DomainReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1393,7 +1393,7 @@ class DomainMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(DomainMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1460,7 +1460,7 @@ class DomainAdminTests(
|
||||
f.write(jsonutils.dumps(overridden_policies))
|
||||
|
||||
def setUp(self):
|
||||
super(DomainAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
self.policy_file_name = self.policy_file.file_name
|
||||
@ -1515,7 +1515,7 @@ class ProjectReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1569,7 +1569,7 @@ class ProjectMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -1622,7 +1622,7 @@ class ProjectAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(ProjectAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
|
||||
self.policy_file_name = self.policy_file.file_name
|
||||
|
@ -25,7 +25,7 @@ CONF = keystone.conf.CONF
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class _SystemUserOauth1ConsumerTests(object):
|
||||
class _SystemUserOauth1ConsumerTests:
|
||||
"""Common default functionality for all system users."""
|
||||
|
||||
def test_user_can_get_consumer(self):
|
||||
@ -41,7 +41,7 @@ class _SystemUserOauth1ConsumerTests(object):
|
||||
c.get('/v3/OS-OAUTH1/consumers', headers=self.headers)
|
||||
|
||||
|
||||
class _SystemReaderAndMemberOauth1ConsumerTests(object):
|
||||
class _SystemReaderAndMemberOauth1ConsumerTests:
|
||||
|
||||
def test_user_cannot_create_consumer(self):
|
||||
with self.test_client() as c:
|
||||
@ -80,7 +80,7 @@ class SystemReaderTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemReaderTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -115,7 +115,7 @@ class SystemMemberTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemMemberTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
@ -149,7 +149,7 @@ class SystemAdminTests(
|
||||
):
|
||||
|
||||
def setUp(self):
|
||||
super(SystemAdminTests, self).setUp()
|
||||
super().setUp()
|
||||
self.loadapp()
|
||||
self.useFixture(ksfixtures.Policy(self.config_fixture))
|
||||
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
|
||||
|
@ -28,7 +28,7 @@ CONF = keystone.conf.CONF
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class _UserCredentialTests(object):
|
||||
class _UserCredentialTests:
|
||||
"""Test cases for anyone that has a valid user token."""
|
||||
|
||||
def test_user_can_create_credentials_for_themselves(self):
|
||||
@ -147,7 +147,7 @@ class _UserCredentialTests(object):
c.delete(path, headers=self.headers)


class _ProjectUsersTests(object):
class _ProjectUsersTests:
"""Users who have project role authorization observe the same behavior."""

def test_user_cannot_get_credentials_for_other_users(self):
@ -444,7 +444,7 @@ class _ProjectUsersTests(object):
)


class _SystemUserCredentialTests(object):
class _SystemUserCredentialTests:
"""Tests that are common across all system users."""

def test_user_can_list_credentials_for_other_users(self):
@ -599,7 +599,7 @@ class SystemReaderTests(
):

def setUp(self):
super(SystemReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -759,7 +759,7 @@ class SystemMemberTests(
):

def setUp(self):
super(SystemMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -919,7 +919,7 @@ class SystemAdminTests(
):

def setUp(self):
super(SystemAdminTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -1063,7 +1063,7 @@ class ProjectReaderTests(
):

def setUp(self):
super(ProjectReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -1106,7 +1106,7 @@ class ProjectMemberTests(
):

def setUp(self):
super(ProjectMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -1149,7 +1149,7 @@ class ProjectAdminTests(
):

def setUp(self):
super(ProjectAdminTests, self).setUp()
super().setUp()
self.loadapp()

self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
@ -1206,7 +1206,7 @@ class ProjectReaderTestsEnforceScopeFalse(
):

def setUp(self):
super(ProjectReaderTestsEnforceScopeFalse, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=False)
@ -1249,7 +1249,7 @@ class ProjectMemberTestsEnforceScopeFalse(
):

def setUp(self):
super(ProjectMemberTestsEnforceScopeFalse, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=False)
@ -1292,7 +1292,7 @@ class ProjectAdminTestsEnforceScopeFalse(
):

def setUp(self):
super(ProjectAdminTestsEnforceScopeFalse, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=False)
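The hunks in this file, like the ones that follow, are one of two mechanical pyupgrade rewrites. The class hunks drop the explicit `object` base, which is redundant on Python 3 because every class already inherits from `object`. A minimal illustrative sketch of the equivalence (the class name below is made up, not taken from the Keystone tree):

# Python 2-compatible spelling, as on the removed lines:
class _ExampleProtectionTests(object):
    pass

# Spelling pyupgrade leaves behind; behaviour and MRO are identical:
class _ExampleProtectionTests:
    pass

assert _ExampleProtectionTests.__mro__ == (_ExampleProtectionTests, object)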
@ -25,7 +25,7 @@ CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs


class _SystemDomainAndProjectUserDomainConfigTests(object):
class _SystemDomainAndProjectUserDomainConfigTests:

def test_user_can_get_security_compliance_domain_config(self):
# Set the security compliance configuration options
@ -94,7 +94,7 @@ class _SystemDomainAndProjectUserDomainConfigTests(object):
)


class _SystemUserDomainConfigTests(object):
class _SystemUserDomainConfigTests:

def test_user_can_get_domain_config(self):
domain = PROVIDERS.resource_api.create_domain(
@ -264,7 +264,7 @@ class _SystemUserDomainConfigTests(object):
c.get('/v3/domains/config/ldap/url/default', headers=self.headers)


class _SystemReaderMemberDomainAndProjectUserDomainConfigTests(object):
class _SystemReaderMemberDomainAndProjectUserDomainConfigTests:

def test_user_cannot_create_domain_config(self):
domain = PROVIDERS.resource_api.create_domain(
@ -374,7 +374,7 @@ class _SystemReaderMemberDomainAndProjectUserDomainConfigTests(object):
)


class _DomainAndProjectUserDomainConfigTests(object):
class _DomainAndProjectUserDomainConfigTests:

def test_user_cannot_get_domain_config(self):
domain = PROVIDERS.resource_api.create_domain(
@ -463,7 +463,7 @@ class SystemReaderTests(
):

def setUp(self):
super(SystemReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -499,7 +499,7 @@ class SystemMemberTests(
):

def setUp(self):
super(SystemMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -534,7 +534,7 @@ class SystemAdminTests(
):

def setUp(self):
super(SystemAdminTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -690,7 +690,7 @@ class DomainUserTests(
):

def setUp(self):
super(DomainUserTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -730,7 +730,7 @@ class ProjectUserTests(
):

def setUp(self):
super(ProjectUserTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -759,7 +759,7 @@ class ProjectUserTestsWithoutEnforceScope(
):

def setUp(self):
super(ProjectUserTestsWithoutEnforceScope, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
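The setUp() hunks above and below are the second rewrite: the two-argument call `super(ClassName, self)` becomes the zero-argument `super()`, which Python 3 binds to the enclosing class automatically. A small self-contained sketch under assumed names (Base and ExampleReaderTests are stand-ins, not the real Keystone test classes):

class Base:
    def setUp(self):
        self.loaded = True


class ExampleReaderTests(Base):
    def setUp(self):
        # Identical in behaviour to the pre-pyupgrade spelling:
        #     super(ExampleReaderTests, self).setUp()
        super().setUp()


tests = ExampleReaderTests()
tests.setUp()
assert tests.loaded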
@ -25,7 +25,7 @@ CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs


class _SystemUserDomainRoleTests(object):
class _SystemUserDomainRoleTests:
"""Common default functionality for all system users."""

def test_user_can_list_domain_roles(self):
@ -52,7 +52,7 @@ class _SystemUserDomainRoleTests(object):
self.assertEqual(role['id'], r.json['role']['id'])


class _SystemReaderAndMemberDomainRoleTests(object):
class _SystemReaderAndMemberDomainRoleTests:
"""Common default functionality for system readers and system members."""

def test_user_cannot_create_domain_roles(self):
@ -100,7 +100,7 @@ class _SystemReaderAndMemberDomainRoleTests(object):
)


class _DomainAndProjectUserDomainRoleTests(object):
class _DomainAndProjectUserDomainRoleTests:
"""Common functionality for all domain and project users."""

def test_user_cannot_list_domain_roles(self):
@ -182,7 +182,7 @@ class SystemReaderTests(
):

def setUp(self):
super(SystemReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -217,7 +217,7 @@ class SystemMemberTests(
):

def setUp(self):
super(SystemMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -251,7 +251,7 @@ class SystemAdminTests(
):

def setUp(self):
super(SystemAdminTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -314,7 +314,7 @@ class DomainUserTests(
):

def setUp(self):
super(DomainUserTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -352,7 +352,7 @@ class ProjectUserTests(
):

def setUp(self):
super(ProjectUserTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -379,7 +379,7 @@ class ProjectUserTestsWithoutEnforceScope(
):

def setUp(self):
super(ProjectUserTestsWithoutEnforceScope, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
@ -28,7 +28,7 @@ CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs


class _SystemUserDomainTests(object):
class _SystemUserDomainTests:

def test_user_can_list_domains(self):
domain = PROVIDERS.resource_api.create_domain(
@ -91,7 +91,7 @@ class _SystemUserDomainTests(object):
self.assertEqual(domain['id'], r.json['domain']['id'])


class _SystemMemberAndReaderDomainTests(object):
class _SystemMemberAndReaderDomainTests:

def test_user_cannot_create_a_domain(self):
create = {'domain': {'name': uuid.uuid4().hex}}
@ -131,7 +131,7 @@ class _SystemMemberAndReaderDomainTests(object):
)


class _DomainReaderDomainTests(object):
class _DomainReaderDomainTests:

def test_user_can_list_domains(self):
# second domain, should be invisible to scoped reader
@ -198,7 +198,7 @@ class _DomainReaderDomainTests(object):
self.assertEqual(0, len(r.json['domains']))


class _ProjectUserDomainTests(object):
class _ProjectUserDomainTests:

def test_user_can_get_a_domain(self):
with self.test_client() as c:
@ -317,7 +317,7 @@ class SystemReaderTests(
):

def setUp(self):
super(SystemReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -354,7 +354,7 @@ class SystemMemberTests(
):

def setUp(self):
super(SystemMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -390,7 +390,7 @@ class SystemAdminTests(
):

def setUp(self):
super(SystemAdminTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -447,7 +447,7 @@ class DomainUserTests(
):

def setUp(self):
super(DomainUserTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -487,7 +487,7 @@ class ProjectReaderTests(
):

def setUp(self):
super(ProjectReaderTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -533,7 +533,7 @@ class ProjectMemberTests(
):

def setUp(self):
super(ProjectMemberTests, self).setUp()
super().setUp()
self.loadapp()
self.useFixture(ksfixtures.Policy(self.config_fixture))
self.config_fixture.config(group='oslo_policy', enforce_scope=True)
@ -579,7 +579,7 @@ class ProjectAdminTests(
):

def setUp(self):
super(ProjectAdminTests, self).setUp()
super().setUp()
self.loadapp()
self.policy_file = self.useFixture(temporaryfile.SecureTempFile())
self.policy_file_name = self.policy_file.file_name