Ruff the code
Switch from black to ruff similarly to keystoneauth. Change-Id: Ieee72e8598ae182d9f6cc40d7f4c20e955a69b2d
This commit is contained in:
parent
f7ffacb7ad
commit
ae7b3a0534
@ -22,16 +22,12 @@ repos:
|
||||
# rev: v1.1.1
|
||||
# hooks:
|
||||
# - id: doc8
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.15.2
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.6.9
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: ['--py38-plus']
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.4.0
|
||||
hooks:
|
||||
- id: black
|
||||
args: ['-S', '-l', '79']
|
||||
- id: ruff
|
||||
args: ['--fix', '--unsafe-fixes']
|
||||
- id: ruff-format
|
||||
- repo: https://github.com/PyCQA/bandit
|
||||
rev: '1.7.9'
|
||||
hooks:
|
||||
|
@ -23,15 +23,9 @@
|
||||
# serve to show the default.
|
||||
|
||||
html_theme = 'openstackdocs'
|
||||
html_theme_options = {
|
||||
"sidebar_dropdown": "api_ref",
|
||||
"sidebar_mode": "toc",
|
||||
}
|
||||
html_theme_options = {"sidebar_dropdown": "api_ref", "sidebar_mode": "toc"}
|
||||
|
||||
extensions = [
|
||||
'os_api_ref',
|
||||
'openstackdocstheme',
|
||||
]
|
||||
extensions = ['os_api_ref', 'openstackdocstheme']
|
||||
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
|
@ -29,7 +29,7 @@ class KeycloakClient:
|
||||
}
|
||||
r = requests.post(self.token_endpoint(realm), data=params).json()
|
||||
headers = {
|
||||
'Authorization': ("Bearer %s" % r['access_token']),
|
||||
'Authorization': f"Bearer {r['access_token']}",
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
self.session.headers.update(headers)
|
||||
|
@ -285,7 +285,7 @@ texinfo_documents = [
|
||||
'keystone',
|
||||
'One line description of project.',
|
||||
'Miscellaneous',
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
|
@ -110,15 +110,15 @@ class ResourceBase(ks_flask.ResourceBase):
|
||||
loaded = cred['blob']
|
||||
|
||||
# Convert to the legacy format
|
||||
cred_data = dict(
|
||||
user_id=cred.get('user_id'),
|
||||
project_id=cred.get('project_id'),
|
||||
access=loaded.get('access'),
|
||||
secret=loaded.get('secret'),
|
||||
trust_id=loaded.get('trust_id'),
|
||||
app_cred_id=loaded.get('app_cred_id'),
|
||||
access_token_id=loaded.get('access_token_id'),
|
||||
)
|
||||
cred_data = {
|
||||
'user_id': cred.get('user_id'),
|
||||
'project_id': cred.get('project_id'),
|
||||
'access': loaded.get('access'),
|
||||
'secret': loaded.get('secret'),
|
||||
'trust_id': loaded.get('trust_id'),
|
||||
'app_cred_id': loaded.get('app_cred_id'),
|
||||
'access_token_id': loaded.get('access_token_id'),
|
||||
}
|
||||
|
||||
# validate the signature
|
||||
self._check_signature(cred_data, credentials)
|
||||
|
@ -24,7 +24,7 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
def build_prior_role_response_data(prior_role_id, prior_role_name):
|
||||
return {
|
||||
'id': prior_role_id,
|
||||
'links': {'self': ks_flask.base_url(path='/roles/%s' % prior_role_id)},
|
||||
'links': {'self': ks_flask.base_url(path=f'/roles/{prior_role_id}')},
|
||||
'name': prior_role_name,
|
||||
}
|
||||
|
||||
@ -33,7 +33,9 @@ def build_implied_role_response_data(implied_role):
|
||||
return {
|
||||
'id': implied_role['id'],
|
||||
'links': {
|
||||
'self': ks_flask.base_url(path='/roles/%s' % implied_role['id'])
|
||||
'self': ks_flask.base_url(
|
||||
path='/roles/{}'.format(implied_role['id'])
|
||||
)
|
||||
},
|
||||
'name': implied_role['name'],
|
||||
}
|
||||
|
@ -466,15 +466,15 @@ class AuthAPI(ks_flask.APIBase):
|
||||
resource=AuthProjectsResource,
|
||||
url='/auth/projects',
|
||||
alternate_urls=[
|
||||
dict(
|
||||
url='/OS-FEDERATION/projects',
|
||||
json_home=ks_flask.construct_json_home_data(
|
||||
{
|
||||
'url': '/OS-FEDERATION/projects',
|
||||
'json_home': ks_flask.construct_json_home_data(
|
||||
rel='projects',
|
||||
resource_relation_func=(
|
||||
json_home_relations.os_federation_resource_rel_func
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
],
|
||||
rel='auth_projects',
|
||||
resource_kwargs={},
|
||||
@ -483,15 +483,15 @@ class AuthAPI(ks_flask.APIBase):
|
||||
resource=AuthDomainsResource,
|
||||
url='/auth/domains',
|
||||
alternate_urls=[
|
||||
dict(
|
||||
url='/OS-FEDERATION/domains',
|
||||
json_home=ks_flask.construct_json_home_data(
|
||||
{
|
||||
'url': '/OS-FEDERATION/domains',
|
||||
'json_home': ks_flask.construct_json_home_data(
|
||||
rel='domains',
|
||||
resource_relation_func=(
|
||||
json_home_relations.os_federation_resource_rel_func
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
],
|
||||
rel='auth_domains',
|
||||
resource_kwargs={},
|
||||
@ -590,7 +590,4 @@ class AuthFederationAPI(ks_flask.APIBase):
|
||||
]
|
||||
|
||||
|
||||
APIs = (
|
||||
AuthAPI,
|
||||
AuthFederationAPI,
|
||||
)
|
||||
APIs = (AuthAPI, AuthFederationAPI)
|
||||
|
@ -286,7 +286,6 @@ class CredentialResource(ks_flask.ResourceBase):
|
||||
|
||||
|
||||
class CredentialAPI(ks_flask.APIBase):
|
||||
|
||||
_name = 'credentials'
|
||||
_import_name = __name__
|
||||
resource_mapping = [
|
||||
|
@ -31,12 +31,7 @@ def _get_versions_list(identity_url):
|
||||
'id': 'v3.14',
|
||||
'status': 'stable',
|
||||
'updated': '2020-04-07T00:00:00Z',
|
||||
'links': [
|
||||
{
|
||||
'rel': 'self',
|
||||
'href': identity_url,
|
||||
}
|
||||
],
|
||||
'links': [{'rel': 'self', 'href': identity_url}],
|
||||
'media-types': [
|
||||
{'base': 'application/json', 'type': MEDIA_TYPE_JSON % 'v3'}
|
||||
],
|
||||
@ -70,7 +65,7 @@ def get_versions():
|
||||
mimetype=MimeTypes.JSON_HOME,
|
||||
)
|
||||
else:
|
||||
identity_url = '%s/' % ks_flask.base_url()
|
||||
identity_url = f'{ks_flask.base_url()}/'
|
||||
versions = _get_versions_list(identity_url)
|
||||
# Set the preferred version to the latest "stable" version.
|
||||
# TODO(morgan): If we ever have more API versions find the latest
|
||||
@ -99,7 +94,7 @@ def get_version_v3():
|
||||
response=jsonutils.dumps(content), mimetype=MimeTypes.JSON_HOME
|
||||
)
|
||||
else:
|
||||
identity_url = '%s/' % ks_flask.base_url()
|
||||
identity_url = f'{ks_flask.base_url()}/'
|
||||
versions = _get_versions_list(identity_url)
|
||||
return flask.Response(
|
||||
response=jsonutils.dumps({'version': versions['v3']}),
|
||||
|
@ -67,7 +67,7 @@ class EndpointResource(ks_flask.ResourceBase):
|
||||
try:
|
||||
PROVIDERS.catalog_api.get_region(endpoint['region_id'])
|
||||
except exception.RegionNotFound:
|
||||
region = dict(id=endpoint['region_id'])
|
||||
region = {'id': endpoint['region_id']}
|
||||
PROVIDERS.catalog_api.create_region(
|
||||
region, initiator=notifications.build_audit_initiator()
|
||||
)
|
||||
|
@ -235,12 +235,11 @@ class EPFilterGroupsProjectsResource(ks_flask.ResourceBase):
|
||||
@classmethod
|
||||
def _add_self_referential_link(cls, ref, collection_name=None):
|
||||
url = (
|
||||
'/OS-EP-FILTER/endpoint_groups/%(endpoint_group_id)s'
|
||||
'/projects/%(project_id)s'
|
||||
% {
|
||||
'endpoint_group_id': ref['endpoint_group_id'],
|
||||
'project_id': ref['project_id'],
|
||||
}
|
||||
'/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
|
||||
'/projects/{project_id}'.format(
|
||||
endpoint_group_id=ref['endpoint_group_id'],
|
||||
project_id=ref['project_id'],
|
||||
)
|
||||
)
|
||||
ref.setdefault('links', {})
|
||||
ref['links']['self'] = url
|
||||
|
@ -99,7 +99,9 @@ class IdentityProvidersResource(_ResourceBase):
|
||||
"""
|
||||
base_path = ref['links'].get('self')
|
||||
if base_path is None:
|
||||
base_path = '/'.join(ks_flask.base_url(path='/%s' % ref['id']))
|
||||
base_path = '/'.join(
|
||||
ks_flask.base_url(path='/{}'.format(ref['id']))
|
||||
)
|
||||
|
||||
for name in ['protocols']:
|
||||
ref['links'][name] = '/'.join([base_path, name])
|
||||
@ -188,7 +190,6 @@ class _IdentityProvidersProtocolsResourceBase(_ResourceBase):
|
||||
|
||||
|
||||
class IDPProtocolsListResource(_IdentityProvidersProtocolsResourceBase):
|
||||
|
||||
def get(self, idp_id):
|
||||
"""List protocols for an IDP.
|
||||
|
||||
@ -205,7 +206,6 @@ class IDPProtocolsListResource(_IdentityProvidersProtocolsResourceBase):
|
||||
|
||||
|
||||
class IDPProtocolsCRUDResource(_IdentityProvidersProtocolsResourceBase):
|
||||
|
||||
def get(self, idp_id, protocol_id):
|
||||
"""Get protocols for an IDP.
|
||||
|
||||
@ -448,7 +448,6 @@ class SAML2MetadataResource(flask_restful.Resource):
|
||||
|
||||
|
||||
class OSFederationAuthResource(flask_restful.Resource):
|
||||
|
||||
@ks_flask.unenforced_api
|
||||
def get(self, idp_id, protocol_id):
|
||||
"""Authenticate from dedicated uri endpoint.
|
||||
|
@ -117,10 +117,10 @@ class ConsumerResource(ks_flask.ResourceBase):
|
||||
def delete(self, consumer_id):
|
||||
ENFORCER.enforce_call(action='identity:delete_consumer')
|
||||
reason = (
|
||||
'Invalidating token cache because consumer %(consumer_id)s has '
|
||||
f'Invalidating token cache because consumer {consumer_id} has '
|
||||
'been deleted. Authorization for users with OAuth tokens will be '
|
||||
'recalculated and enforced accordingly the next time they '
|
||||
'authenticate or validate a token.' % {'consumer_id': consumer_id}
|
||||
'authenticate or validate a token.'
|
||||
)
|
||||
notifications.invalidate_token_cache_notification(reason)
|
||||
PROVIDERS.oauth_api.delete_consumer(
|
||||
@ -191,12 +191,11 @@ class RequestTokenResource(_OAuth1ResourceBase):
|
||||
)
|
||||
|
||||
result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
|
||||
key=token_ref['id'],
|
||||
secret=token_ref['request_secret'],
|
||||
key=token_ref['id'], secret=token_ref['request_secret']
|
||||
)
|
||||
|
||||
if CONF.oauth1.request_token_duration > 0:
|
||||
expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at']
|
||||
expiry_bit = '&oauth_expires_at={}'.format(token_ref['expires_at'])
|
||||
result += expiry_bit
|
||||
|
||||
resp = flask.make_response(result, http.client.CREATED)
|
||||
@ -294,12 +293,11 @@ class AccessTokenResource(_OAuth1ResourceBase):
|
||||
)
|
||||
|
||||
result = 'oauth_token={key}&oauth_token_secret={secret}'.format(
|
||||
key=token_ref['id'],
|
||||
secret=token_ref['access_secret'],
|
||||
key=token_ref['id'], secret=token_ref['access_secret']
|
||||
)
|
||||
|
||||
if CONF.oauth1.access_token_duration > 0:
|
||||
expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at'])
|
||||
expiry_bit = '&oauth_expires_at={}'.format(token_ref['expires_at'])
|
||||
result += expiry_bit
|
||||
|
||||
resp = flask.make_response(result, http.client.CREATED)
|
||||
|
@ -37,7 +37,6 @@ _build_resource_relation = json_home_relations.os_oauth2_resource_rel_func
|
||||
|
||||
|
||||
class AccessTokenResource(ks_flask.ResourceBase):
|
||||
|
||||
def _method_not_allowed(self):
|
||||
"""Raise a method not allowed error."""
|
||||
raise exception.OAuth2OtherError(
|
||||
@ -361,11 +360,11 @@ class AccessTokenResource(ks_flask.ResourceBase):
|
||||
raise error
|
||||
client_cert_dn = {}
|
||||
for key in cert_subject_dn:
|
||||
client_cert_dn['SSL_CLIENT_SUBJECT_DN_%s' % key.upper()] = (
|
||||
client_cert_dn[f'SSL_CLIENT_SUBJECT_DN_{key.upper()}'] = (
|
||||
cert_subject_dn.get(key)
|
||||
)
|
||||
for key in cert_issuer_dn:
|
||||
client_cert_dn['SSL_CLIENT_ISSUER_DN_%s' % key.upper()] = (
|
||||
client_cert_dn[f'SSL_CLIENT_ISSUER_DN_{key.upper()}'] = (
|
||||
cert_issuer_dn.get(key)
|
||||
)
|
||||
|
||||
|
@ -102,7 +102,6 @@ class PolicyResource(ks_flask.ResourceBase):
|
||||
|
||||
|
||||
class EndpointPolicyResource(flask_restful.Resource):
|
||||
|
||||
def get(self, policy_id):
|
||||
ENFORCER.enforce_call(action='identity:list_endpoints_for_policy')
|
||||
PROVIDERS.policy_api.get_policy(policy_id)
|
||||
@ -120,7 +119,6 @@ class EndpointPolicyResource(flask_restful.Resource):
|
||||
|
||||
|
||||
class EndpointPolicyAssociations(flask_restful.Resource):
|
||||
|
||||
def get(self, policy_id, endpoint_id):
|
||||
action = 'identity:check_policy_association_for_endpoint'
|
||||
ENFORCER.enforce_call(action=action)
|
||||
@ -153,7 +151,6 @@ class EndpointPolicyAssociations(flask_restful.Resource):
|
||||
|
||||
|
||||
class ServicePolicyAssociations(flask_restful.Resource):
|
||||
|
||||
def get(self, policy_id, service_id):
|
||||
action = 'identity:check_policy_association_for_service'
|
||||
ENFORCER.enforce_call(action=action)
|
||||
@ -186,7 +183,6 @@ class ServicePolicyAssociations(flask_restful.Resource):
|
||||
|
||||
|
||||
class ServiceRegionPolicyAssociations(flask_restful.Resource):
|
||||
|
||||
def get(self, policy_id, service_id, region_id):
|
||||
action = 'identity:check_policy_association_for_region_and_service'
|
||||
ENFORCER.enforce_call(action=action)
|
||||
|
@ -315,16 +315,16 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
|
||||
|
||||
if 'domain_id' in entity.get('indirect', {}):
|
||||
inherited_assignment = True
|
||||
formatted_link = (
|
||||
'/domains/%s' % entity['indirect']['domain_id']
|
||||
formatted_link = '/domains/{}'.format(
|
||||
entity['indirect']['domain_id']
|
||||
)
|
||||
elif 'project_id' in entity.get('indirect', {}):
|
||||
inherited_assignment = True
|
||||
formatted_link = (
|
||||
'/projects/%s' % entity['indirect']['project_id']
|
||||
formatted_link = '/projects/{}'.format(
|
||||
entity['indirect']['project_id']
|
||||
)
|
||||
else:
|
||||
formatted_link = '/projects/%s' % entity['project_id']
|
||||
formatted_link = '/projects/{}'.format(entity['project_id'])
|
||||
elif 'domain_id' in entity:
|
||||
if 'domain_name' in entity:
|
||||
formatted_entity['scope'] = {
|
||||
@ -337,7 +337,7 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
|
||||
formatted_entity['scope'] = {
|
||||
'domain': {'id': entity['domain_id']}
|
||||
}
|
||||
formatted_link = '/domains/%s' % entity['domain_id']
|
||||
formatted_link = '/domains/{}'.format(entity['domain_id'])
|
||||
elif 'system' in entity:
|
||||
formatted_link = '/system'
|
||||
formatted_entity['scope'] = {'system': entity['system']}
|
||||
@ -356,13 +356,16 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
|
||||
formatted_entity['user'] = {'id': entity['user_id']}
|
||||
if 'group_id' in entity.get('indirect', {}):
|
||||
membership_url = ks_flask.base_url(
|
||||
path='/groups/%s/users/%s'
|
||||
% (entity['indirect']['group_id'], entity['user_id'])
|
||||
path='/groups/{}/users/{}'.format(
|
||||
entity['indirect']['group_id'], entity['user_id']
|
||||
)
|
||||
)
|
||||
formatted_entity['links']['membership'] = membership_url
|
||||
formatted_link += '/groups/%s' % entity['indirect']['group_id']
|
||||
formatted_link += '/groups/{}'.format(
|
||||
entity['indirect']['group_id']
|
||||
)
|
||||
else:
|
||||
formatted_link += '/users/%s' % entity['user_id']
|
||||
formatted_link += '/users/{}'.format(entity['user_id'])
|
||||
elif 'group_id' in entity:
|
||||
if 'group_name' in entity:
|
||||
formatted_entity['group'] = {
|
||||
@ -375,7 +378,7 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
|
||||
}
|
||||
else:
|
||||
formatted_entity['group'] = {'id': entity['group_id']}
|
||||
formatted_link += '/groups/%s' % entity['group_id']
|
||||
formatted_link += '/groups/{}'.format(entity['group_id'])
|
||||
|
||||
if 'role_name' in entity:
|
||||
formatted_entity['role'] = {
|
||||
@ -395,18 +398,17 @@ class RoleAssignmentsResource(ks_flask.ResourceBase):
|
||||
formatted_entity['role'] = {'id': entity['role_id']}
|
||||
prior_role_link = ''
|
||||
if 'role_id' in entity.get('indirect', {}):
|
||||
formatted_link += '/roles/%s' % entity['indirect']['role_id']
|
||||
formatted_link += '/roles/{}'.format(entity['indirect']['role_id'])
|
||||
prior_role_link = '/prior_role/{prior}/implies/{implied}'.format(
|
||||
prior=entity['role_id'],
|
||||
implied=entity['indirect']['role_id'],
|
||||
prior=entity['role_id'], implied=entity['indirect']['role_id']
|
||||
)
|
||||
else:
|
||||
formatted_link += '/roles/%s' % entity['role_id']
|
||||
formatted_link += '/roles/{}'.format(entity['role_id'])
|
||||
|
||||
if inherited_assignment:
|
||||
formatted_entity['scope']['OS-INHERIT:inherited_to'] = 'projects'
|
||||
formatted_link = (
|
||||
'/OS-INHERIT%s/inherited_to_projects' % formatted_link
|
||||
f'/OS-INHERIT{formatted_link}/inherited_to_projects'
|
||||
)
|
||||
|
||||
formatted_entity['links']['assignment'] = ks_flask.base_url(
|
||||
|
@ -36,7 +36,7 @@ class RoleInferencesResource(flask_restful.Resource):
|
||||
for role_ref in PROVIDERS.role_api.list_roles()
|
||||
}
|
||||
|
||||
rules = dict()
|
||||
rules = {}
|
||||
for ref in refs:
|
||||
implied_role_id = ref['implied_role_id']
|
||||
prior_role_id = ref['prior_role_id']
|
||||
@ -49,10 +49,7 @@ class RoleInferencesResource(flask_restful.Resource):
|
||||
rules[prior_role_id] = implied
|
||||
|
||||
inferences = []
|
||||
for (
|
||||
prior_id,
|
||||
implied,
|
||||
) in rules.items():
|
||||
for prior_id, implied in rules.items():
|
||||
prior_response = shared.build_prior_role_response_data(
|
||||
prior_id, role_dict[prior_id]['name']
|
||||
)
|
||||
|
@ -220,13 +220,12 @@ class RoleImplicationListResource(flask_restful.Resource):
|
||||
shared.build_implied_role_response_data(implied_role)
|
||||
)
|
||||
response_json['links'] = {
|
||||
'self': ks_flask.base_url(path='/roles/%s/implies' % prior_role_id)
|
||||
'self': ks_flask.base_url(path=f'/roles/{prior_role_id}/implies')
|
||||
}
|
||||
return response_json
|
||||
|
||||
|
||||
class RoleImplicationResource(flask_restful.Resource):
|
||||
|
||||
def head(self, prior_role_id, implied_role_id=None):
|
||||
# TODO(morgan): deprecate "check_implied_role" policy, as a user must
|
||||
# have both check_implied_role and get_implied_role to use the head
|
||||
@ -266,8 +265,7 @@ class RoleImplicationResource(flask_restful.Resource):
|
||||
)
|
||||
response_json['links'] = {
|
||||
'self': ks_flask.base_url(
|
||||
path='/roles/%(prior)s/implies/%(implies)s'
|
||||
% {'prior': prior_role_id, 'implies': implied_role_id}
|
||||
path=f'/roles/{prior_role_id}/implies/{implied_role_id}'
|
||||
)
|
||||
}
|
||||
return response_json
|
||||
|
@ -94,7 +94,7 @@ def _normalize_trust_roles(trust):
|
||||
|
||||
trust['roles'] = trust_full_roles
|
||||
trust['roles_links'] = {
|
||||
'self': ks_flask.base_url(path='/%s/roles' % trust['id']),
|
||||
'self': ks_flask.base_url(path='/{}/roles'.format(trust['id'])),
|
||||
'next': None,
|
||||
'previous': None,
|
||||
}
|
||||
@ -374,7 +374,6 @@ class TrustResource(ks_flask.ResourceBase):
|
||||
# URL additions and does not have a collection key/member_key, we use
|
||||
# the flask-restful Resource, not the keystone ResourceBase
|
||||
class RolesForTrustListResource(flask_restful.Resource):
|
||||
|
||||
@property
|
||||
def oslo_context(self):
|
||||
return flask.request.environ.get(context.REQUEST_CONTEXT_ENV, None)
|
||||
@ -429,7 +428,6 @@ class RolesForTrustListResource(flask_restful.Resource):
|
||||
# URL additions and does not have a collection key/member_key, we use
|
||||
# the flask-restful Resource, not the keystone ResourceBase
|
||||
class RoleForTrustResource(flask_restful.Resource):
|
||||
|
||||
@property
|
||||
def oslo_context(self):
|
||||
return flask.request.environ.get(context.REQUEST_CONTEXT_ENV, None)
|
||||
|
@ -66,7 +66,6 @@ def _convert_v3_to_ec2_credential(credential):
|
||||
|
||||
|
||||
def _format_token_entity(entity):
|
||||
|
||||
formatted_entity = entity.copy()
|
||||
access_token_id = formatted_entity['id']
|
||||
user_id = formatted_entity.get('authorizing_user_id', '')
|
||||
@ -76,8 +75,7 @@ def _format_token_entity(entity):
|
||||
formatted_entity.pop('access_secret')
|
||||
|
||||
url = (
|
||||
'/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s'
|
||||
'/roles' % {'user_id': user_id, 'access_token_id': access_token_id}
|
||||
f'/users/{user_id}/OS-OAUTH1/access_tokens/{access_token_id}' '/roles'
|
||||
)
|
||||
|
||||
formatted_entity.setdefault('links', {})
|
||||
@ -418,19 +416,19 @@ class UserOSEC2CredentialsResourceListCreate(_UserOSEC2CredBaseResource):
|
||||
PROVIDERS.identity_api.get_user(user_id)
|
||||
tenant_id = self.request_body_json.get('tenant_id')
|
||||
PROVIDERS.resource_api.get_project(tenant_id)
|
||||
blob = dict(
|
||||
access=uuid.uuid4().hex,
|
||||
secret=uuid.uuid4().hex,
|
||||
trust_id=self.oslo_context.trust_id,
|
||||
)
|
||||
blob = {
|
||||
'access': uuid.uuid4().hex,
|
||||
'secret': uuid.uuid4().hex,
|
||||
'trust_id': self.oslo_context.trust_id,
|
||||
}
|
||||
credential_id = utils.hash_access_key(blob['access'])
|
||||
cred_data = dict(
|
||||
user_id=user_id,
|
||||
project_id=tenant_id,
|
||||
blob=jsonutils.dumps(blob),
|
||||
id=credential_id,
|
||||
type=CRED_TYPE_EC2,
|
||||
)
|
||||
cred_data = {
|
||||
'user_id': user_id,
|
||||
'project_id': tenant_id,
|
||||
'blob': jsonutils.dumps(blob),
|
||||
'id': credential_id,
|
||||
'type': CRED_TYPE_EC2,
|
||||
}
|
||||
PROVIDERS.credential_api.create_credential(credential_id, cred_data)
|
||||
ref = _convert_v3_to_ec2_credential(cred_data)
|
||||
return self.wrap_member(ref), http.client.CREATED
|
||||
@ -537,10 +535,10 @@ class OAuth1AccessTokenCRUDResource(_OAuth1ResourceBase):
|
||||
access_token = PROVIDERS.oauth_api.get_access_token(access_token_id)
|
||||
reason = (
|
||||
'Invalidating the token cache because an access token for '
|
||||
'consumer %(consumer_id)s has been deleted. Authorization for '
|
||||
'consumer {consumer_id} has been deleted. Authorization for '
|
||||
'users with OAuth tokens will be recalculated and enforced '
|
||||
'accordingly the next time they authenticate or validate a '
|
||||
'token.' % {'consumer_id': access_token['consumer_id']}
|
||||
'token.'.format(consumer_id=access_token['consumer_id'])
|
||||
)
|
||||
notifications.invalidate_token_cache_notification(reason)
|
||||
PROVIDERS.oauth_api.delete_access_token(
|
||||
@ -752,8 +750,7 @@ class UserAppCredGetDeleteResource(ks_flask.ResourceBase):
|
||||
"""
|
||||
target = _update_request_user_id_attribute()
|
||||
ENFORCER.enforce_call(
|
||||
action='identity:get_application_credential',
|
||||
target_attr=target,
|
||||
action='identity:get_application_credential', target_attr=target
|
||||
)
|
||||
ref = PROVIDERS.application_credential_api.get_application_credential(
|
||||
application_credential_id
|
||||
@ -787,11 +784,7 @@ class UserAccessRuleListResource(ks_flask.ResourceBase):
|
||||
|
||||
GET/HEAD /v3/users/{user_id}/access_rules
|
||||
"""
|
||||
filters = (
|
||||
'service',
|
||||
'path',
|
||||
'method',
|
||||
)
|
||||
filters = ('service', 'path', 'method')
|
||||
ENFORCER.enforce_call(
|
||||
action='identity:list_access_rules',
|
||||
filters=filters,
|
||||
|
@ -52,9 +52,7 @@ def _schema_validator(
|
||||
schema_validator.validate(target)
|
||||
|
||||
|
||||
def request_body_schema(
|
||||
schema: ty.Optional[ty.Dict[str, ty.Any]] = None,
|
||||
):
|
||||
def request_body_schema(schema: ty.Optional[ty.Dict[str, ty.Any]] = None):
|
||||
"""Register a schema to validate request body.
|
||||
|
||||
``schema`` will be used for validating the request body just before the API
|
||||
@ -88,9 +86,7 @@ def request_body_schema(
|
||||
return add_validator
|
||||
|
||||
|
||||
def request_query_schema(
|
||||
schema: ty.Optional[ty.Dict[str, ty.Any]] = None,
|
||||
):
|
||||
def request_query_schema(schema: ty.Optional[ty.Dict[str, ty.Any]] = None):
|
||||
"""Register a schema to validate request query string parameters.
|
||||
|
||||
``schema`` will be used for validating request query strings just before
|
||||
@ -113,13 +109,7 @@ def request_query_schema(
|
||||
else:
|
||||
req = flask.request.args
|
||||
|
||||
_schema_validator(
|
||||
schema,
|
||||
req,
|
||||
args,
|
||||
kwargs,
|
||||
is_body=True,
|
||||
)
|
||||
_schema_validator(schema, req, args, kwargs, is_body=True)
|
||||
return func(*args, **kwargs)
|
||||
|
||||
wrapper._request_query_schema = schema
|
||||
@ -129,9 +119,7 @@ def request_query_schema(
|
||||
return add_validator
|
||||
|
||||
|
||||
def response_body_schema(
|
||||
schema: ty.Optional[ty.Dict[str, ty.Any]] = None,
|
||||
):
|
||||
def response_body_schema(schema: ty.Optional[ty.Dict[str, ty.Any]] = None):
|
||||
"""Register a schema to validate response body.
|
||||
|
||||
``schema`` will be used for validating the response body just after the API
|
||||
@ -169,13 +157,7 @@ def response_body_schema(
|
||||
else:
|
||||
body = jsonutils.loads(_body)
|
||||
|
||||
_schema_validator(
|
||||
schema,
|
||||
body,
|
||||
args,
|
||||
kwargs,
|
||||
is_body=True,
|
||||
)
|
||||
_schema_validator(schema, body, args, kwargs, is_body=True)
|
||||
return response
|
||||
|
||||
wrapper._response_body_schema = schema
|
||||
|
@ -11,6 +11,7 @@
|
||||
# under the License.
|
||||
|
||||
"""Common parameter types for validating API requests."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
empty: dict[str, Any] = {"type": "null"}
|
||||
@ -24,22 +25,11 @@ name: dict[str, Any] = {
|
||||
|
||||
boolean = {
|
||||
"type": ["boolean", "string"],
|
||||
"enum": [
|
||||
True,
|
||||
"True",
|
||||
"TRUE",
|
||||
"true",
|
||||
False,
|
||||
"False",
|
||||
"FALSE",
|
||||
"false",
|
||||
],
|
||||
"enum": [True, "True", "TRUE", "true", False, "False", "FALSE", "false"],
|
||||
}
|
||||
|
||||
|
||||
domain_id: dict[str, str] = {
|
||||
"type": "string",
|
||||
}
|
||||
domain_id: dict[str, str] = {"type": "string"}
|
||||
|
||||
parent_id: dict[str, str] = {"type": "string", "format": "uuid"}
|
||||
|
||||
@ -60,10 +50,7 @@ tags: dict[str, Any] = {
|
||||
# As OpenAPI request parameters this is an array of string serialized
|
||||
# as csv
|
||||
"openapi": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": _tag_name_property,
|
||||
},
|
||||
"schema": {"type": "array", "items": _tag_name_property},
|
||||
"style": "form",
|
||||
"explode": False,
|
||||
}
|
||||
|
@ -11,6 +11,7 @@
|
||||
# under the License.
|
||||
|
||||
"""Common field types for validating API responses."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
# Common schema for resource `link` attribute
|
||||
|
@ -26,10 +26,7 @@ from keystone.i18n import _
|
||||
|
||||
|
||||
def _soft_validate_additional_properties(
|
||||
validator,
|
||||
additional_properties_value,
|
||||
param_value,
|
||||
schema,
|
||||
validator, additional_properties_value, param_value, schema
|
||||
):
|
||||
"""Validator function.
|
||||
|
||||
|
@ -18,7 +18,6 @@ from keystone import exception
|
||||
|
||||
|
||||
class ApplicationCredentialDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
@abc.abstractmethod
|
||||
def authenticate(self, application_credential_id, secret):
|
||||
"""Validate an application credential.
|
||||
|
@ -125,7 +125,6 @@ class ApplicationCredentialAccessRuleModel(sql.ModelBase, sql.ModelDictMixin):
|
||||
|
||||
|
||||
class ApplicationCredential(base.ApplicationCredentialDriverBase):
|
||||
|
||||
def _check_secret(self, secret, app_cred_ref):
|
||||
secret_hash = app_cred_ref['secret_hash']
|
||||
return password_hashing.check_password(secret, secret_hash)
|
||||
|
@ -21,7 +21,6 @@ CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class AssignmentDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
def _get_list_limit(self):
|
||||
return CONF.assignment.list_limit or CONF.list_limit
|
||||
|
||||
|
@ -41,7 +41,6 @@ class AssignmentType:
|
||||
|
||||
|
||||
class Assignment(base.AssignmentDriverBase):
|
||||
|
||||
@classmethod
|
||||
def default_role_driver(cls):
|
||||
return 'sql'
|
||||
@ -55,7 +54,6 @@ class Assignment(base.AssignmentDriverBase):
|
||||
project_id=None,
|
||||
inherited_to_projects=False,
|
||||
):
|
||||
|
||||
assignment_type = AssignmentType.calculate_type(
|
||||
user_id, group_id, project_id, domain_id
|
||||
)
|
||||
@ -182,11 +180,7 @@ class Assignment(base.AssignmentDriverBase):
|
||||
)
|
||||
)
|
||||
except sql.DBDuplicateEntry:
|
||||
msg = 'User {} already has role {} in tenant {}'.format(
|
||||
user_id,
|
||||
role_id,
|
||||
project_id,
|
||||
)
|
||||
msg = f'User {user_id} already has role {role_id} in tenant {project_id}'
|
||||
raise exception.Conflict(type='role grant', details=msg)
|
||||
|
||||
def remove_role_from_user_and_project(self, user_id, project_id, role_id):
|
||||
@ -264,7 +258,6 @@ class Assignment(base.AssignmentDriverBase):
|
||||
project_ids=None,
|
||||
inherited_to_projects=None,
|
||||
):
|
||||
|
||||
def denormalize_role(ref):
|
||||
assignment = {}
|
||||
if ref.type == AssignmentType.USER_PROJECT:
|
||||
|
@ -71,8 +71,8 @@ class Manager(manager.Manager):
|
||||
|
||||
self.event_callbacks = {
|
||||
notifications.ACTIONS.deleted: {
|
||||
'domain': [self._delete_domain_assignments],
|
||||
},
|
||||
'domain': [self._delete_domain_assignments]
|
||||
}
|
||||
}
|
||||
|
||||
def _delete_domain_assignments(
|
||||
@ -209,7 +209,6 @@ class Manager(manager.Manager):
|
||||
inherited_to_projects=False,
|
||||
context=None,
|
||||
):
|
||||
|
||||
# The parameters for this method must match the parameters for
|
||||
# create_grant so that the notifications.role_assignment decorator
|
||||
# will work.
|
||||
@ -286,7 +285,6 @@ class Manager(manager.Manager):
|
||||
inherited_to_projects=False,
|
||||
context=None,
|
||||
):
|
||||
|
||||
# The parameters for this method must match the parameters for
|
||||
# delete_grant so that the notifications.role_assignment decorator
|
||||
# will work.
|
||||
@ -326,16 +324,9 @@ class Manager(manager.Manager):
|
||||
target_id = project_id
|
||||
|
||||
reason = (
|
||||
'Invalidating the token cache because role %(role_id)s was '
|
||||
'removed from %(actor_type)s %(actor_id)s on %(target_type)s '
|
||||
'%(target_id)s.'
|
||||
% {
|
||||
'role_id': role_id,
|
||||
'actor_type': actor_type,
|
||||
'actor_id': actor_id,
|
||||
'target_type': target_type,
|
||||
'target_id': target_id,
|
||||
}
|
||||
f'Invalidating the token cache because role {role_id} was '
|
||||
f'removed from {actor_type} {actor_id} on {target_type} '
|
||||
f'{target_id}.'
|
||||
)
|
||||
notifications.invalidate_token_cache_notification(reason)
|
||||
|
||||
@ -429,7 +420,6 @@ class Manager(manager.Manager):
|
||||
inherited_to_projects=False,
|
||||
initiator=None,
|
||||
):
|
||||
|
||||
# check if role exist before any processing
|
||||
PROVIDERS.role_api.get_role(role_id)
|
||||
|
||||
@ -760,7 +750,7 @@ class Manager(manager.Manager):
|
||||
implied_roles_cache = {}
|
||||
role_refs_to_check = list(role_refs)
|
||||
ref_results = list(role_refs)
|
||||
checked_role_refs = list()
|
||||
checked_role_refs = []
|
||||
while role_refs_to_check:
|
||||
next_ref = role_refs_to_check.pop()
|
||||
checked_role_refs.append(next_ref)
|
||||
@ -1348,9 +1338,8 @@ class Manager(manager.Manager):
|
||||
role = PROVIDERS.role_api.get_role(role_id)
|
||||
if role.get('domain_id'):
|
||||
raise exception.ValidationError(
|
||||
'Role %(role_id)s is a domain-specific role. Unable to use '
|
||||
f'Role {role_id} is a domain-specific role. Unable to use '
|
||||
'a domain-specific role in a system assignment.'
|
||||
% {'role_id': role_id}
|
||||
)
|
||||
target_id = self._SYSTEM_SCOPE_TOKEN
|
||||
assignment_type = self._USER_SYSTEM
|
||||
@ -1420,9 +1409,8 @@ class Manager(manager.Manager):
|
||||
role = PROVIDERS.role_api.get_role(role_id)
|
||||
if role.get('domain_id'):
|
||||
raise exception.ValidationError(
|
||||
'Role %(role_id)s is a domain-specific role. Unable to use '
|
||||
f'Role {role_id} is a domain-specific role. Unable to use '
|
||||
'a domain-specific role in a system assignment.'
|
||||
% {'role_id': role_id}
|
||||
)
|
||||
target_id = self._SYSTEM_SCOPE_TOKEN
|
||||
assignment_type = self._GROUP_SYSTEM
|
||||
@ -1556,10 +1544,10 @@ class RoleManager(manager.Manager):
|
||||
notifications.Audit.deleted(self._ROLE, role_id, initiator)
|
||||
self.get_role.invalidate(self, role_id)
|
||||
reason = (
|
||||
'Invalidating the token cache because role %(role_id)s has been '
|
||||
f'Invalidating the token cache because role {role_id} has been '
|
||||
'removed. Role assignments for users will be recalculated and '
|
||||
'enforced accordingly the next time they authenticate or validate '
|
||||
'a token' % {'role_id': role_id}
|
||||
'a token'
|
||||
)
|
||||
notifications.invalidate_token_cache_notification(reason)
|
||||
COMPUTED_ASSIGNMENTS_REGION.invalidate()
|
||||
|
@ -29,7 +29,6 @@ CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class RoleDriverBase(metaclass=abc.ABCMeta):
|
||||
|
||||
def _get_list_limit(self):
|
||||
return CONF.role.list_limit or CONF.list_limit
|
||||
|
||||
|
@ -19,9 +19,7 @@ ROLE_OPTIONS_REGISTRY = resource_options.ResourceOptionRegistry('ROLE')
|
||||
# NOTE(morgan): wrap this in a function for testing purposes.
|
||||
# This is called on import by design.
|
||||
def register_role_options():
|
||||
for opt in [
|
||||
ro_opt.IMMUTABLE_OPT,
|
||||
]:
|
||||
for opt in [ro_opt.IMMUTABLE_OPT]:
|
||||
ROLE_OPTIONS_REGISTRY.register_option(opt)
|
||||
|
||||
|
||||
|
@ -20,7 +20,6 @@ from keystone import exception
|
||||
|
||||
|
||||
class Role(base.RoleDriverBase):
|
||||
|
||||
@sql.handle_conflicts(conflict_type='role')
|
||||
def create_role(self, role_id, role):
|
||||
with sql.session_for_write() as session:
|
||||
|
@ -19,7 +19,6 @@ from keystone.common import sql
|
||||
|
||||
|
||||
class RoleTable(sql.ModelBase, sql.ModelDictMixinWithExtras):
|
||||
|
||||
def to_dict(self, include_extra_dict=False):
|
||||
d = super().to_dict(include_extra_dict=include_extra_dict)
|
||||
if d['domain_id'] == base.NULL_DOMAIN_ID:
|
||||
@ -96,7 +95,7 @@ class ImpliedRoleTable(sql.ModelBase, sql.ModelDictMixin):
|
||||
overrides the `to_dict` function from the base class
|
||||
to avoid having an `extra` field.
|
||||
"""
|
||||
d = dict()
|
||||
d = {}
|
||||
for attr in self.__class__.attributes:
|
||||
d[attr] = getattr(self, attr)
|
||||
return d
|
||||
|
@ -38,7 +38,7 @@ def _get_auth_driver_manager(namespace, plugin_name):
|
||||
|
||||
def load_auth_method(method):
|
||||
plugin_name = CONF.auth.get(method) or 'default'
|
||||
namespace = 'keystone.auth.%s' % method
|
||||
namespace = f'keystone.auth.{method}'
|
||||
driver_manager = _get_auth_driver_manager(namespace, plugin_name)
|
||||
return driver_manager.driver
|
||||
|
||||
@ -261,7 +261,7 @@ class AuthInfo(provider_api.ProviderAPIMixin):
|
||||
app_cred_api = PROVIDERS.application_credential_api
|
||||
app_creds = app_cred_api.list_application_credentials(user_id, hints)
|
||||
if len(app_creds) != 1:
|
||||
message = "Could not find application credential: %s" % name
|
||||
message = f"Could not find application credential: {name}"
|
||||
tr_message = _("Could not find application credential: %s") % name
|
||||
LOG.warning(message)
|
||||
raise exception.Unauthorized(tr_message)
|
||||
|
@ -45,13 +45,7 @@ class AuthMethodHandler(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
|
||||
to the re-scope type action. Here's an example of ``response_data`` on
|
||||
successful authentication::
|
||||
|
||||
{
|
||||
"methods": [
|
||||
"password",
|
||||
"token"
|
||||
],
|
||||
"user_id": "abc123"
|
||||
}
|
||||
{"methods": ["password", "token"], "user_id": "abc123"}
|
||||
|
||||
Plugins are invoked in the order in which they are specified in the
|
||||
``methods`` attribute of the ``identity`` object. For example,
|
||||
@ -61,23 +55,12 @@ class AuthMethodHandler(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
|
||||
{
|
||||
"auth": {
|
||||
"identity": {
|
||||
"custom-plugin": {
|
||||
"custom-data": "sdfdfsfsfsdfsf"
|
||||
},
|
||||
"methods": [
|
||||
"custom-plugin",
|
||||
"password",
|
||||
"token"
|
||||
],
|
||||
"custom-plugin": {"custom-data": "sdfdfsfsfsdfsf"},
|
||||
"methods": ["custom-plugin", "password", "token"],
|
||||
"password": {
|
||||
"user": {
|
||||
"id": "s23sfad1",
|
||||
"password": "secret"
|
||||
}
|
||||
"user": {"id": "s23sfad1", "password": "secret"}
|
||||
},
|
||||
"token": {
|
||||
"id": "sdfafasdfsfasfasdfds"
|
||||
}
|
||||
"token": {"id": "sdfafasdfsfasfasdfds"},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -27,9 +27,7 @@ CONF = keystone.conf.CONF
|
||||
LOG = log.getLogger(__name__)
|
||||
PROVIDERS = provider_api.ProviderAPIs
|
||||
_NOTIFY_OP = 'authenticate'
|
||||
_NOTIFY_EVENT = '{service}.{event}'.format(
|
||||
service=notifications.SERVICE, event=_NOTIFY_OP
|
||||
)
|
||||
_NOTIFY_EVENT = f'{notifications.SERVICE}.{_NOTIFY_OP}'
|
||||
|
||||
|
||||
def construct_method_map_from_config():
|
||||
@ -38,7 +36,7 @@ def construct_method_map_from_config():
|
||||
:returns: a dictionary containing the methods and their indexes
|
||||
|
||||
"""
|
||||
method_map = dict()
|
||||
method_map = {}
|
||||
method_index = 1
|
||||
for method in CONF.auth.methods:
|
||||
method_map[method_index] = method
|
||||
@ -99,7 +97,6 @@ def convert_integer_to_method_list(method_int):
|
||||
|
||||
|
||||
class BaseUserInfo(provider_api.ProviderAPIMixin):
|
||||
|
||||
@classmethod
|
||||
def create(cls, auth_payload, method_name):
|
||||
user_auth_info = cls()
|
||||
@ -213,7 +210,6 @@ class BaseUserInfo(provider_api.ProviderAPIMixin):
|
||||
|
||||
|
||||
class UserAuthInfo(BaseUserInfo):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.password = None
|
||||
@ -225,7 +221,6 @@ class UserAuthInfo(BaseUserInfo):
|
||||
|
||||
|
||||
class TOTPUserInfo(BaseUserInfo):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.passcode = None
|
||||
|
@ -34,7 +34,6 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Mapped(base.AuthMethodHandler):
|
||||
|
||||
def _get_token_ref(self, auth_payload):
|
||||
token_id = auth_payload['id']
|
||||
return PROVIDERS.token_provider_api.validate_token(token_id)
|
||||
@ -184,7 +183,6 @@ def handle_unscoped_token(
|
||||
assignment_api,
|
||||
role_api,
|
||||
):
|
||||
|
||||
def validate_shadow_mapping(
|
||||
shadow_projects, existing_roles, user_domain_id, idp_id
|
||||
):
|
||||
@ -300,7 +298,6 @@ def handle_unscoped_token(
|
||||
)
|
||||
|
||||
if 'projects' in mapped_properties:
|
||||
|
||||
existing_roles = {
|
||||
role['name']: role for role in role_api.list_roles()
|
||||
}
|
||||
|
@ -23,7 +23,6 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Password(base.AuthMethodHandler):
|
||||
|
||||
def authenticate(self, auth_payload):
|
||||
"""Try to authenticate against the identity backend."""
|
||||
response_data = {}
|
||||
|
@ -29,7 +29,6 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Token(base.AuthMethodHandler):
|
||||
|
||||
def _get_token_ref(self, auth_payload):
|
||||
token_id = auth_payload['id']
|
||||
return PROVIDERS.token_provider_api.validate_token(token_id)
|
||||
@ -59,7 +58,6 @@ class Token(base.AuthMethodHandler):
|
||||
def token_authenticate(token):
|
||||
response_data = {}
|
||||
try:
|
||||
|
||||
# Do not allow tokens used for delegation to
|
||||
# create another token, or perform any changes of
|
||||
# state in Keystone. To do so is to invite elevation of
|
||||
|
@ -90,7 +90,6 @@ def _generate_totp_passcodes(secret, included_previous_windows=0):
|
||||
|
||||
|
||||
class TOTP(base.AuthMethodHandler):
|
||||
|
||||
def authenticate(self, auth_payload):
|
||||
"""Try to authenticate using TOTP."""
|
||||
response_data = {}
|
||||
|
@ -21,57 +21,34 @@ token_issue = {
|
||||
'identity': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'methods': {
|
||||
'type': 'array',
|
||||
'items': {
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
'methods': {'type': 'array', 'items': {'type': 'string'}},
|
||||
'password': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'user': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
'name': {
|
||||
'type': 'string',
|
||||
},
|
||||
'password': {
|
||||
'type': 'string',
|
||||
},
|
||||
'id': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
'password': {'type': 'string'},
|
||||
'domain': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
'name': {
|
||||
'type': 'string',
|
||||
},
|
||||
'id': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
},
|
||||
'token': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
'required': [
|
||||
'id',
|
||||
],
|
||||
'properties': {'id': {'type': 'string'}},
|
||||
'required': ['id'],
|
||||
},
|
||||
},
|
||||
'required': [
|
||||
'methods',
|
||||
],
|
||||
'required': ['methods'],
|
||||
},
|
||||
'scope': {
|
||||
# For explicit unscoped authentication the type should not be
|
||||
@ -85,21 +62,13 @@ token_issue = {
|
||||
'project': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'name': {
|
||||
'type': 'string',
|
||||
},
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
'name': {'type': 'string'},
|
||||
'id': {'type': 'string'},
|
||||
'domain': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
'name': {
|
||||
'type': 'string',
|
||||
},
|
||||
'id': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -107,21 +76,13 @@ token_issue = {
|
||||
'domain': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
'name': {
|
||||
'type': 'string',
|
||||
},
|
||||
'id': {'type': 'string'},
|
||||
'name': {'type': 'string'},
|
||||
},
|
||||
},
|
||||
'OS-TRUST:trust': {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'id': {
|
||||
'type': 'string',
|
||||
},
|
||||
},
|
||||
'properties': {'id': {'type': 'string'}},
|
||||
},
|
||||
'system': {
|
||||
'type': 'object',
|
||||
@ -130,9 +91,7 @@ token_issue = {
|
||||
},
|
||||
},
|
||||
},
|
||||
'required': [
|
||||
'identity',
|
||||
],
|
||||
'required': ['identity'],
|
||||
}
|
||||
|
||||
|
||||
|
@ -242,21 +242,23 @@ class CatalogDriverBase(provider_api.ProviderAPIMixin, metaclass=abc.ABCMeta):
|
||||
[
|
||||
{
|
||||
"endpoints": [
|
||||
{
|
||||
"interface": "public",
|
||||
"id": "--endpoint-id--",
|
||||
"region": "RegionOne",
|
||||
"url": "http://external:8776/v1/--project-id--"
|
||||
},
|
||||
{
|
||||
"interface": "internal",
|
||||
"id": "--endpoint-id--",
|
||||
"region": "RegionOne",
|
||||
"url": "http://internal:8776/v1/--project-id--"
|
||||
}],
|
||||
"id": "--service-id--",
|
||||
"type": "volume"
|
||||
}]
|
||||
{
|
||||
"interface": "public",
|
||||
"id": "--endpoint-id--",
|
||||
"region": "RegionOne",
|
||||
"url": "http://external:8776/v1/--project-id--",
|
||||
},
|
||||
{
|
||||
"interface": "internal",
|
||||
"id": "--endpoint-id--",
|
||||
"region": "RegionOne",
|
||||
"url": "http://internal:8776/v1/--project-id--",
|
||||
},
|
||||
],
|
||||
"id": "--service-id--",
|
||||
"type": "volume",
|
||||
}
|
||||
]
|
||||
|
||||
:returns: A list representing the service catalog or an empty list
|
||||
:raises keystone.exception.NotFound: If the endpoint doesn't exist.
|
||||
|
@ -333,7 +333,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
}
|
||||
catalog.setdefault(region, {})
|
||||
catalog[region].setdefault(service_type, default_service)
|
||||
interface_url = '%sURL' % endpoint['interface']
|
||||
interface_url = '{}URL'.format(endpoint['interface'])
|
||||
catalog[region][service_type][interface_url] = url
|
||||
|
||||
return catalog
|
||||
@ -355,12 +355,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
d.update({'user_id': user_id})
|
||||
silent_keyerror_failures = []
|
||||
if project_id:
|
||||
d.update(
|
||||
{
|
||||
'tenant_id': project_id,
|
||||
'project_id': project_id,
|
||||
}
|
||||
)
|
||||
d.update({'tenant_id': project_id, 'project_id': project_id})
|
||||
else:
|
||||
silent_keyerror_failures = ['tenant_id', 'project_id']
|
||||
|
||||
@ -463,8 +458,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
|
||||
def _get_project_endpoint_ref(self, session, endpoint_id, project_id):
|
||||
endpoint_filter_ref = session.get(
|
||||
ProjectEndpoint,
|
||||
(endpoint_id, project_id),
|
||||
ProjectEndpoint, (endpoint_id, project_id)
|
||||
)
|
||||
if endpoint_filter_ref is None:
|
||||
msg = _(
|
||||
@ -576,8 +570,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
self, session, endpoint_group_id, project_id
|
||||
):
|
||||
endpoint_group_project_ref = session.get(
|
||||
ProjectEndpointGroupMembership,
|
||||
(endpoint_group_id, project_id),
|
||||
ProjectEndpointGroupMembership, (endpoint_group_id, project_id)
|
||||
)
|
||||
if endpoint_group_project_ref is None:
|
||||
msg = _('Endpoint Group Project Association not found')
|
||||
|
@ -166,11 +166,7 @@ class Catalog(base.CatalogDriverBase):
|
||||
for key in service_ref:
|
||||
if key.endswith('URL'):
|
||||
interface = key[:-3]
|
||||
endpoint_id = '{}-{}-{}'.format(
|
||||
region_id,
|
||||
service_type,
|
||||
interface,
|
||||
)
|
||||
endpoint_id = f'{region_id}-{service_type}-{interface}'
|
||||
yield {
|
||||
'id': endpoint_id,
|
||||
'service_id': service_type,
|
||||
@ -215,16 +211,10 @@ class Catalog(base.CatalogDriverBase):
|
||||
silent_keyerror_failures = []
|
||||
if project_id:
|
||||
substitutions.update(
|
||||
{
|
||||
'tenant_id': project_id,
|
||||
'project_id': project_id,
|
||||
}
|
||||
{'tenant_id': project_id, 'project_id': project_id}
|
||||
)
|
||||
else:
|
||||
silent_keyerror_failures = [
|
||||
'tenant_id',
|
||||
'project_id',
|
||||
]
|
||||
silent_keyerror_failures = ['tenant_id', 'project_id']
|
||||
|
||||
catalog = {}
|
||||
# TODO(davechen): If there is service with no endpoints, we should
|
||||
|
@ -26,7 +26,6 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Bootstrapper:
|
||||
|
||||
def __init__(self):
|
||||
backends.load_backends()
|
||||
|
||||
|
@ -52,7 +52,6 @@ LOG = log.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseApp:
|
||||
|
||||
name: str
|
||||
|
||||
@classmethod
|
||||
@ -536,8 +535,7 @@ class ResetLastActive(BaseApp):
|
||||
raise SystemExit('reset_last_active aborted.')
|
||||
|
||||
LOG.debug(
|
||||
"Resetting null values to current time %s",
|
||||
timeutils.utcnow(),
|
||||
"Resetting null values to current time %s", timeutils.utcnow()
|
||||
)
|
||||
drivers = backends.load_backends()
|
||||
identity_api = drivers['identity_api']
|
||||
@ -565,14 +563,14 @@ class BasePermissionsSetup(BaseApp):
|
||||
if a:
|
||||
keystone_user_id = utils.get_unix_user(a)[0]
|
||||
except KeyError:
|
||||
raise ValueError("Unknown user '%s' in --keystone-user" % a)
|
||||
raise ValueError(f"Unknown user '{a}' in --keystone-user")
|
||||
|
||||
try:
|
||||
a = CONF.command.keystone_group
|
||||
if a:
|
||||
keystone_group_id = utils.get_unix_group(a)[0]
|
||||
except KeyError:
|
||||
raise ValueError("Unknown group '%s' in --keystone-group" % a)
|
||||
raise ValueError(f"Unknown group '{a}' in --keystone-group")
|
||||
|
||||
return keystone_user_id, keystone_group_id
|
||||
|
||||
@ -1180,7 +1178,6 @@ def _domain_config_finder(conf_dir):
|
||||
|
||||
|
||||
class DomainConfigUploadFiles:
|
||||
|
||||
def __init__(self, domain_config_finder=_domain_config_finder):
|
||||
super().__init__()
|
||||
self.load_backends()
|
||||
@ -1518,12 +1515,9 @@ class MappingEngineTester(BaseApp):
|
||||
tester.normalize_assertion()
|
||||
|
||||
if CONF.command.engine_debug:
|
||||
print(f"Using Rules:\n{jsonutils.dumps(tester.rules, indent=2)}")
|
||||
print(
|
||||
"Using Rules:\n%s" % (jsonutils.dumps(tester.rules, indent=2))
|
||||
)
|
||||
print(
|
||||
"Using Assertion:\n%s"
|
||||
% (jsonutils.dumps(tester.assertion, indent=2))
|
||||
f"Using Assertion:\n{jsonutils.dumps(tester.assertion, indent=2)}"
|
||||
)
|
||||
|
||||
rp = mapping_engine.RuleProcessor(
|
||||
|
@ -50,8 +50,9 @@ def diagnose():
|
||||
# Some symptoms may take a long time to check, so let's keep
|
||||
# curious users posted on our progress as we go.
|
||||
print(
|
||||
'Checking for %s...'
|
||||
% symptom.__name__[len(SYMPTOM_PREFIX) :].replace('_', ' ')
|
||||
'Checking for {}...'.format(
|
||||
symptom.__name__[len(SYMPTOM_PREFIX) :].replace('_', ' ')
|
||||
)
|
||||
)
|
||||
|
||||
# All symptoms are just callables that return true when they match the
|
||||
@ -65,9 +66,7 @@ def diagnose():
|
||||
# passing a string here. Also, we include a line break here to
|
||||
# visually separate the symptom's description from any other
|
||||
# checks -- it provides a better user experience.
|
||||
print(
|
||||
_('\nWARNING: %s') % _(symptom.__doc__)
|
||||
) # noqa: See comment above.
|
||||
print(_('\nWARNING: %s') % _(symptom.__doc__)) # noqa: See comment above.
|
||||
|
||||
return symptoms_found
|
||||
|
||||
|
@ -85,10 +85,10 @@ def symptom_LDAP_file_based_domain_specific_configs():
|
||||
if invalid_files:
|
||||
invalid_str = ', '.join(invalid_files)
|
||||
print(
|
||||
'Warning: The following non-config files were found: %s\n'
|
||||
f'Warning: The following non-config files were found: {invalid_str}\n'
|
||||
'If they are intended to be config files then rename them '
|
||||
'to the form of `keystone.<domain_name>.conf`. '
|
||||
'Otherwise, ignore this warning' % invalid_str
|
||||
'Otherwise, ignore this warning'
|
||||
)
|
||||
return True
|
||||
else:
|
||||
|
@ -28,7 +28,6 @@ PROVIDERS = provider_api.ProviderAPIs
|
||||
|
||||
|
||||
class Identity:
|
||||
|
||||
def __init__(self):
|
||||
backends.load_backends()
|
||||
|
||||
|
@ -54,13 +54,14 @@ class Checks(upgradecheck.UpgradeCommands):
|
||||
if any(failed_rules):
|
||||
return upgradecheck.Result(
|
||||
upgradecheck.Code.FAILURE,
|
||||
"Policy check string for rules \"%s\" are overridden to "
|
||||
"Policy check string for rules \"{}\" are overridden to "
|
||||
"\"\", \"@\", or []. In the next release, this will cause "
|
||||
"these rules to be fully permissive as hardcoded enforcement "
|
||||
"will be removed. To correct this issue, either stop "
|
||||
"overriding these rules in config to accept the defaults, or "
|
||||
"explicitly set check strings that are not empty."
|
||||
% "\", \"".join(failed_rules),
|
||||
"explicitly set check strings that are not empty.".format(
|
||||
"\", \"".join(failed_rules)
|
||||
),
|
||||
)
|
||||
return upgradecheck.Result(
|
||||
upgradecheck.Code.SUCCESS, 'Trust policies are safe.'
|
||||
@ -70,11 +71,7 @@ class Checks(upgradecheck.UpgradeCommands):
|
||||
hints = driver_hints.Hints()
|
||||
hints.add_filter('domain_id', None) # Only check global roles
|
||||
roles = PROVIDERS.role_api.list_roles(hints=hints)
|
||||
default_roles = (
|
||||
'admin',
|
||||
'member',
|
||||
'reader',
|
||||
)
|
||||
default_roles = ('admin', 'member', 'reader')
|
||||
failed_roles = []
|
||||
for role in [r for r in roles if r['name'] in default_roles]:
|
||||
if not role.get('options', {}).get('immutable'):
|
||||
@ -82,7 +79,7 @@ class Checks(upgradecheck.UpgradeCommands):
|
||||
if any(failed_roles):
|
||||
return upgradecheck.Result(
|
||||
upgradecheck.Code.FAILURE,
|
||||
"Roles are not immutable: %s" % ", ".join(failed_roles),
|
||||
"Roles are not immutable: {}".format(", ".join(failed_roles)),
|
||||
)
|
||||
return upgradecheck.Result(
|
||||
upgradecheck.Code.SUCCESS, "Default roles are immutable."
|
||||
|
2
keystone/common/cache/_context_cache.py
vendored
2
keystone/common/cache/_context_cache.py
vendored
@ -11,6 +11,7 @@
|
||||
# under the License.
|
||||
|
||||
"""A dogpile.cache proxy that caches objects in the request local cache."""
|
||||
|
||||
from dogpile.cache import api
|
||||
from dogpile.cache import proxy
|
||||
from oslo_context import context as oslo_context
|
||||
@ -28,7 +29,6 @@ def _register_model_handler(handler_class):
|
||||
|
||||
|
||||
class _ResponseCacheProxy(proxy.ProxyBackend):
|
||||
|
||||
__key_pfx = '_request_cache_%s'
|
||||
|
||||
def _get_request_context(self):
|
||||
|
4
keystone/common/cache/core.py
vendored
4
keystone/common/cache/core.py
vendored
@ -27,7 +27,6 @@ CONF = keystone.conf.CONF
|
||||
|
||||
|
||||
class RegionInvalidationManager:
|
||||
|
||||
REGION_KEY_PREFIX = '<<<region>>>:'
|
||||
|
||||
def __init__(self, invalidation_region, region_name):
|
||||
@ -53,7 +52,6 @@ class RegionInvalidationManager:
|
||||
|
||||
|
||||
class DistributedInvalidationStrategy(region.RegionInvalidationStrategy):
|
||||
|
||||
def __init__(self, region_manager):
|
||||
self._region_manager = region_manager
|
||||
|
||||
@ -165,7 +163,7 @@ def configure_invalidation_region():
|
||||
config_dict['expiration_time'] = None # we don't want an expiration
|
||||
|
||||
CACHE_INVALIDATION_REGION.configure_from_config(
|
||||
config_dict, '%s.' % CONF.cache.config_prefix
|
||||
config_dict, f'{CONF.cache.config_prefix}.'
|
||||
)
|
||||
|
||||
# NOTE(breton): Wrap the cache invalidation region to avoid excessive
|
||||
|
@ -22,7 +22,6 @@ def _prop(name):
|
||||
|
||||
|
||||
class RequestContext(oslo_context.RequestContext):
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
self.username = kwargs.pop('username', None)
|
||||
self.project_tag_name = kwargs.pop('project_tag_name', None)
|
||||
|
@ -96,7 +96,7 @@ class Hints:
|
||||
|
||||
def __init__(self):
|
||||
self.limit = None
|
||||
self.filters = list()
|
||||
self.filters = []
|
||||
self.cannot_match = False
|
||||
|
||||
def add_filter(
|
||||
|
@ -34,7 +34,6 @@ NULL_KEY = base64.urlsafe_b64encode(b'\x00' * 32)
|
||||
|
||||
|
||||
class FernetUtils:
|
||||
|
||||
def __init__(self, key_repository, max_active_keys, config_group):
|
||||
self.key_repository = key_repository
|
||||
self.max_active_keys = max_active_keys
|
||||
@ -157,8 +156,8 @@ class FernetUtils:
|
||||
LOG.info('Become a valid new key: %s', valid_key_file)
|
||||
|
||||
def _get_key_files(self, key_repo):
|
||||
key_files = dict()
|
||||
keys = dict()
|
||||
key_files = {}
|
||||
keys = {}
|
||||
for filename in os.listdir(key_repo):
|
||||
path = os.path.join(key_repo, str(filename))
|
||||
if os.path.isfile(path):
|
||||
|
@ -20,34 +20,28 @@ from keystone.i18n import _
|
||||
|
||||
|
||||
def build_v3_resource_relation(resource_name):
|
||||
return (
|
||||
'https://docs.openstack.org/api/openstack-identity/3/rel/%s'
|
||||
% resource_name
|
||||
)
|
||||
return f'https://docs.openstack.org/api/openstack-identity/3/rel/{resource_name}'
|
||||
|
||||
|
||||
def build_v3_extension_resource_relation(
|
||||
extension_name, extension_version, resource_name
|
||||
):
|
||||
return (
|
||||
'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/rel/'
|
||||
'%s' % (extension_name, extension_version, resource_name)
|
||||
f'https://docs.openstack.org/api/openstack-identity/3/ext/{extension_name}/{extension_version}/rel/'
|
||||
f'{resource_name}'
|
||||
)
|
||||
|
||||
|
||||
def build_v3_parameter_relation(parameter_name):
|
||||
return (
|
||||
'https://docs.openstack.org/api/openstack-identity/3/param/%s'
|
||||
% parameter_name
|
||||
)
|
||||
return f'https://docs.openstack.org/api/openstack-identity/3/param/{parameter_name}'
|
||||
|
||||
|
||||
def build_v3_extension_parameter_relation(
|
||||
extension_name, extension_version, parameter_name
|
||||
):
|
||||
return (
|
||||
'https://docs.openstack.org/api/openstack-identity/3/ext/%s/%s/param/'
|
||||
'%s' % (extension_name, extension_version, parameter_name)
|
||||
f'https://docs.openstack.org/api/openstack-identity/3/ext/{extension_name}/{extension_version}/param/'
|
||||
f'{parameter_name}'
|
||||
)
|
||||
|
||||
|
||||
|
@ -86,10 +86,8 @@ class _TraceMeta(type):
|
||||
@staticmethod
|
||||
def wrapper(__f, __classname):
|
||||
__argspec = inspect.getfullargspec(__f)
|
||||
__fn_info = '{module}.{classname}.{funcname}'.format(
|
||||
module=inspect.getmodule(__f).__name__,
|
||||
classname=__classname,
|
||||
funcname=__f.__name__,
|
||||
__fn_info = (
|
||||
f'{inspect.getmodule(__f).__name__}.{__classname}.{__f.__name__}'
|
||||
)
|
||||
# NOTE(morganfainberg): Omit "cls" and "self" when printing trace logs
|
||||
# the index can be calculated at wrap time rather than at runtime.
|
||||
|
@ -26,11 +26,7 @@ class Bcrypt(password_hashers.PasswordHasher):
ident_values: set[str] = {"$2$", "$2a$", "$2b$", "$2x$", "$2y$"}

@staticmethod
def hash(
password: bytes,
rounds: int = 12,
**kwargs,
) -> str:
def hash(password: bytes, rounds: int = 12, **kwargs) -> str:
"""Generate password hash string with ident and params

https://pypi.org/project/bcrypt/
@ -66,11 +62,7 @@ class Bcrypt_sha256(password_hashers.PasswordHasher):
prefix: str = "$bcrypt-sha256$"

@staticmethod
def hash(
password: bytes,
rounds: int = 12,
**kwargs,
) -> str:
def hash(password: bytes, rounds: int = 12, **kwargs) -> str:
"""Generate password hash string with ident and params

https://pypi.org/project/bcrypt/
@ -29,11 +29,7 @@ class Sha512(password_hashers.PasswordHasher):
hash_algo = hashes.SHA512()

@staticmethod
def hash(
password: bytes,
salt_size: int = 16,
rounds: int = 25000,
) -> str:
def hash(password: bytes, salt_size: int = 16, rounds: int = 25000) -> str:
"""Generate password hash string with ident and params

https://cryptography.io/en/stable/hazmat/primitives/key-derivation-functions/#pbkdf2
@ -49,10 +45,7 @@ class Sha512(password_hashers.PasswordHasher):

# Prepare the kdf function with params
kdf = PBKDF2HMAC(
algorithm=Sha512.hash_algo,
length=64,
salt=salt,
iterations=rounds,
algorithm=Sha512.hash_algo, length=64, salt=salt, iterations=rounds
)

# derive - create a digest
@ -101,10 +94,7 @@ class Sha512(password_hashers.PasswordHasher):

# Prepare the kdf function with params
kdf = PBKDF2HMAC(
algorithm=Sha512.hash_algo,
length=64,
salt=salt,
iterations=rounds,
algorithm=Sha512.hash_algo, length=64, salt=salt, iterations=rounds
)

# Verify the key.
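For reference, the collapsed `PBKDF2HMAC(...)` call is purely a layout change. A standalone sketch of the same derivation, using the same `cryptography` primitives and the defaults shown above:

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

# Prepare the kdf with the parameters above, then derive a 64-byte digest.
salt = os.urandom(16)
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA512(), length=64, salt=salt, iterations=25000
)
digest = kdf.derive(b'correct horse battery staple')  # illustrative password
assert len(digest) == 64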
@ -28,9 +28,7 @@ RULE_ADMIN_OR_TARGET_PROJECT = (
)
RULE_ADMIN_OR_TOKEN_SUBJECT = 'rule:admin_or_token_subject' # nosec
RULE_REVOKE_EVENT_OR_ADMIN = 'rule:revoke_event_or_admin'
RULE_SERVICE_ADMIN_OR_TOKEN_SUBJECT = (
'rule:service_admin_or_token_subject' # nosec
)
RULE_SERVICE_ADMIN_OR_TOKEN_SUBJECT = 'rule:service_admin_or_token_subject' # nosec
RULE_SERVICE_OR_ADMIN = 'rule:service_or_admin'
RULE_TRUST_OWNER = 'user_id:%(trust.trustor_user_id)s'
@ -40,9 +40,7 @@ SYSTEM_ADMIN_OR_DOMAIN_ADMIN_OR_PROJECT_ADMIN = (
# /v3/users/{user_id}/project path.
SYSTEM_READER_OR_DOMAIN_READER_OR_OWNER = (
# System reader policy
'('
+ base.SYSTEM_READER
+ ') or '
'(' + base.SYSTEM_READER + ') or '
# Domain reader policy
'(role:reader and domain_id:%(target.user.domain_id)s) or '
# User accessing the API with a token they've obtained, matching
@ -35,7 +35,7 @@ class ProviderAPIRegistry:
try:
return self.__registry[item]
except KeyError:
raise AttributeError("'ProviderAPIs' has no attribute %s" % item)
raise AttributeError(f"'ProviderAPIs' has no attribute {item}")

def __setattr__(self, key, value):
"""Do not allow setting values on the registry object."""
@ -58,9 +58,8 @@ class ProviderAPIRegistry:

if name in self.__registry:
raise DuplicateProviderError(
'`%(name)s` has already been registered as an api '
'provider by `%(prov)r`'
% {'name': name, 'prov': self.__registry[name]}
f'`{name}` has already been registered as an api '
f'provider by `{self.__registry[name]!r}`'
)
self.__registry[name] = obj
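The second hunk is the more interesting conversion: a %-template fed from a dict becomes two f-strings, with `%(prov)r` turning into the `!r` conversion. Both spellings call repr(), so the message is unchanged. A sketch with shortened, illustrative strings:

name, prov = 'identity_api', object()  # illustrative values
old = '`%(name)s` registered by `%(prov)r`' % {'name': name, 'prov': prov}
new = f'`{name}` registered by `{prov!r}`'
assert old == new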
@ -242,7 +242,7 @@ class RBACEnforcer:
member_name = None
func = getattr(resource, 'get_member_from_driver', None)
if member_name is not None and callable(func):
key = '%s_id' % member_name
key = f'{member_name}_id'
if key in (flask.request.view_args or {}):
# NOTE(morgan): For most correct setup, instantiate the
# view_class. There is no current support for passing
@ -107,11 +107,11 @@ def render_token_response_from_model(token, include_catalog=True):
token_reference['token']['service_providers'] = sps
if token.is_federated:
PROVIDERS.federation_api.get_idp(token.identity_provider_id)
federated_dict = dict(
groups=token.federated_groups,
identity_provider={'id': token.identity_provider_id},
protocol={'id': token.protocol_id},
)
federated_dict = {
'groups': token.federated_groups,
'identity_provider': {'id': token.identity_provider_id},
'protocol': {'id': token.protocol_id},
}
token_reference['token']['user']['OS-FEDERATION'] = federated_dict
del token_reference['token']['user']['password_expires_at']
if token.access_token_id:
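Here C408's unsafe variant rewrites the keyword form as well: `dict(groups=...)` becomes a literal whose keys turn into string literals, with the values untouched. A sketch with an illustrative value:

federated_groups = [{'id': 'deadbeef'}]  # hypothetical value
assert dict(groups=federated_groups) == {'groups': federated_groups}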
@ -183,7 +183,6 @@ class ResourceOptionRegistry:


class ResourceOption:

def __init__(
self,
option_id,
@ -18,6 +18,7 @@ Before using this module, call initialize(). This has to be done before
CONF() because it sets up configuration options.

"""

import datetime
import functools

@ -117,7 +118,6 @@ ModelBase.__init__ = initialize_decorator(ModelBase.__init__)

# Special Fields
class JsonBlob(sql_types.TypeDecorator):

impl = sql.Text
# NOTE(ralonsoh): set to True as any other TypeDecorator in SQLAlchemy
# https://docs.sqlalchemy.org/en/14/core/custom_types.html# \
@ -256,7 +256,6 @@ class ModelDictMixinWithExtras(models.ModelBase):


class ModelDictMixin(models.ModelBase):

@classmethod
def from_dict(cls, d):
"""Return a model instance from a dictionary."""
@ -413,13 +412,13 @@ def _filter(model, query, hints):

if filter_['comparator'] == 'contains':
_WontMatch.check(filter_['value'], column_attr)
query_term = column_attr.ilike('%%%s%%' % filter_['value'])
query_term = column_attr.ilike('%{}%'.format(filter_['value']))
elif filter_['comparator'] == 'startswith':
_WontMatch.check(filter_['value'], column_attr)
query_term = column_attr.ilike('%s%%' % filter_['value'])
query_term = column_attr.ilike('{}%'.format(filter_['value']))
elif filter_['comparator'] == 'endswith':
_WontMatch.check(filter_['value'], column_attr)
query_term = column_attr.ilike('%%%s' % filter_['value'])
query_term = column_attr.ilike('%{}'.format(filter_['value']))
else:
# It's a filter we don't understand, so let the caller
# work out if they need to do something with it.
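This hunk is the one place the fixer stops at `str.format` instead of an f-string, likely because `filter_['value']` nests single quotes inside a single-quoted literal, which f-string syntax only permits from Python 3.12 (PEP 701), so UP032 leaves the `.format()` form alone. The `%%` escapes also disappear, since `%` has no special meaning to `str.format`:

value = 'alice'  # illustrative filter value
assert '%%%s%%' % value == '%{}%'.format(value) == '%alice%'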
@ -51,8 +51,7 @@ def _migration_script_ops(context, directive, phase):
"""
autogen_kwargs = {}
version_path = upgrades.get_version_branch_path(
release=upgrades.CURRENT_RELEASE,
branch=phase,
release=upgrades.CURRENT_RELEASE, branch=phase
)
upgrades.check_bootstrap_new_branch(phase, version_path, autogen_kwargs)

@ -65,7 +64,7 @@ def _migration_script_ops(context, directive, phase):
),
ops.DowngradeOps(ops=[]),
message=directive.message,
**autogen_kwargs
**autogen_kwargs,
)

if not op.upgrade_ops.is_empty():
@ -134,14 +134,10 @@ def do_upgrade(config, cmd):

if revision in upgrades.MILESTONES:
expand_revisions = _find_milestone_revisions(
config,
revision,
upgrades.EXPAND_BRANCH,
config, revision, upgrades.EXPAND_BRANCH
)
contract_revisions = _find_milestone_revisions(
config,
revision,
upgrades.CONTRACT_BRANCH,
config, revision, upgrades.CONTRACT_BRANCH
)
# Expand revisions must be run before contract revisions
revisions = expand_revisions + contract_revisions
@ -152,10 +148,7 @@ def do_upgrade(config, cmd):
# if not CONF.command.sql:
# run_sanity_checks(config, revision)
do_alembic_command(
config,
cmd,
revision=revision,
sql=CONF.command.sql,
config, cmd, revision=revision, sql=CONF.command.sql
)
@ -179,8 +172,7 @@ def do_revision(config, cmd):
for branch in branches:
args = copy.copy(kwargs)
version_path = upgrades.get_version_branch_path(
release=upgrades.CURRENT_RELEASE,
branch=branch,
release=upgrades.CURRENT_RELEASE, branch=branch
)
upgrades.check_bootstrap_new_branch(branch, version_path, args)
do_alembic_command(config, cmd, **args)
@ -46,8 +46,7 @@ def upgrade():
if bind.engine.name == 'mysql':
# Set default DB charset to UTF8.
op.execute(
'ALTER DATABASE %s DEFAULT CHARACTER SET utf8'
% bind.engine.url.database
f'ALTER DATABASE {bind.engine.url.database} DEFAULT CHARACTER SET utf8'
)

op.create_table(
@ -89,11 +88,7 @@ def upgrade():
sql.Column('role_id', sql.String(64), nullable=False),
sql.Column('inherited', sql.Boolean, default=False, nullable=False),
sql.PrimaryKeyConstraint(
'type',
'actor_id',
'target_id',
'role_id',
'inherited',
'type', 'actor_id', 'target_id', 'role_id', 'inherited'
),
sql.Index('ix_actor_id', 'actor_id'),
mysql_engine='InnoDB',
@ -109,8 +104,7 @@ def upgrade():
sql.Column('external_id', sql.String(64)),
sql.Column('user_id', sql.String(64)),
sql.UniqueConstraint(
'external_id',
name='access_rule_external_id_key',
'external_id', name='access_rule_external_id_key'
),
sql.UniqueConstraint(
'user_id',
@ -149,11 +143,7 @@ def upgrade():
sql.Column('type', sql.String(length=255), nullable=False),
sql.Column('extra', ks_sql.JsonBlob.impl),
sql.Column('key_hash', sql.String(64), nullable=False),
sql.Column(
'encrypted_blob',
ks_sql.Text,
nullable=False,
),
sql.Column('encrypted_blob', ks_sql.Text, nullable=False),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
@ -166,9 +156,7 @@ def upgrade():
sql.Column('description', sql.Text),
sql.Column('extra', ks_sql.JsonBlob.impl),
sql.UniqueConstraint(
'domain_id',
'name',
name='ixu_group_name_domain_id',
'domain_id', 'name', name='ixu_group_name_domain_id'
),
mysql_engine='InnoDB',
mysql_charset='utf8',
@ -189,10 +177,7 @@ def upgrade():
nullable=False,
),
sql.UniqueConstraint(
'domain_id',
'local_id',
'entity_type',
name='domain_id',
'domain_id', 'local_id', 'entity_type', name='domain_id'
),
mysql_engine='InnoDB',
mysql_charset='utf8',
@ -261,19 +246,13 @@ def upgrade():
sql.Column(
'domain_id',
sql.String(length=64),
sql.ForeignKey(
'project.id',
name='project_domain_id_fkey',
),
sql.ForeignKey('project.id', name='project_domain_id_fkey'),
nullable=False,
),
sql.Column(
'parent_id',
sql.String(64),
sql.ForeignKey(
'project.id',
name='project_parent_id_fkey',
),
sql.ForeignKey('project.id', name='project_parent_id_fkey'),
nullable=True,
),
sql.Column(
@ -284,9 +263,7 @@ def upgrade():
default=False,
),
sql.UniqueConstraint(
'domain_id',
'name',
name='ixu_project_name_domain_id',
'domain_id', 'name', name='ixu_project_name_domain_id'
),
mysql_engine='InnoDB',
mysql_charset='utf8',
@ -439,9 +416,7 @@ def upgrade():
),
sql.Column('description', sql.String(255), nullable=True),
sql.UniqueConstraint(
'name',
'domain_id',
name='ixu_role_name_domain_id',
'name', 'domain_id', name='ixu_role_name_domain_id'
),
mysql_engine='InnoDB',
mysql_charset='utf8',
@ -558,16 +533,8 @@ def upgrade():
'expires_at_int',
name='duplicate_trust_constraint_expanded',
),
sql.Column(
'redelegated_trust_id',
sql.String(64),
nullable=True,
),
sql.Column(
'redelegation_count',
sql.Integer,
nullable=True,
),
sql.Column('redelegated_trust_id', sql.String(64), nullable=True),
sql.Column('redelegation_count', sql.Integer, nullable=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
@ -604,18 +571,14 @@ def upgrade():
sql.Column(
'user_id',
sql.String(length=64),
sql.ForeignKey(
'user.id',
name='fk_user_group_membership_user_id',
),
sql.ForeignKey('user.id', name='fk_user_group_membership_user_id'),
primary_key=True,
),
sql.Column(
'group_id',
sql.String(length=64),
sql.ForeignKey(
'group.id',
name='fk_user_group_membership_group_id',
'group.id', name='fk_user_group_membership_group_id'
),
primary_key=True,
),
@ -720,10 +683,7 @@ def upgrade():
sql.Column(
'service_id',
sql.String(length=64),
sql.ForeignKey(
'service.id',
name='endpoint_service_id_fkey',
),
sql.ForeignKey('service.id', name='endpoint_service_id_fkey'),
nullable=False,
),
sql.Column('url', sql.Text, nullable=False),
@ -738,10 +698,7 @@ def upgrade():
sql.Column(
'region_id',
sql.String(length=255),
sql.ForeignKey(
'region.id',
name='fk_endpoint_region_id',
),
sql.ForeignKey('region.id', name='fk_endpoint_region_id'),
nullable=True,
),
# NOTE(stevemar): The index was named 'service_id' in
@ -835,10 +792,7 @@ def upgrade():
# FIXME(stephenfin): This should have a foreign key constraint on
# registered_limit.id, but sqlalchemy-migrate clearly didn't handle
# creating a column with embedded FK info as was attempted in 048
sql.Column(
'registered_limit_id',
sql.String(64),
),
sql.Column('registered_limit_id', sql.String(64)),
sql.Column('domain_id', sql.String(64), nullable=True),
# NOTE(stephenfin): Name chosen to preserve backwards compatibility
# with names used for primary key unique constraints
@ -850,12 +804,7 @@ def upgrade():
op.create_table(
'local_user',
sql.Column('id', sql.Integer, primary_key=True, nullable=False),
sql.Column(
'user_id',
sql.String(64),
nullable=False,
unique=True,
),
sql.Column('user_id', sql.String(64), nullable=False, unique=True),
sql.Column('domain_id', sql.String(64), nullable=False),
sql.Column('name', sql.String(255), nullable=False),
sql.Column('failed_auth_count', sql.Integer, nullable=True),
@ -874,11 +823,7 @@ def upgrade():
'nonlocal_user',
sql.Column('domain_id', sql.String(64), primary_key=True),
sql.Column('name', sql.String(255), primary_key=True),
sql.Column(
'user_id',
sql.String(64),
nullable=False,
),
sql.Column('user_id', sql.String(64), nullable=False),
sql.ForeignKeyConstraint(
['user_id', 'domain_id'],
['user.id', 'user.domain_id'],
@ -974,10 +919,7 @@ def upgrade():
# only for sqlite, once we collapse 073 we can remove this constraint
with op.batch_alter_table('assignment') as batch_op:
batch_op.create_foreign_key(
'fk_assignment_role_id',
'role',
['role_id'],
['id'],
'fk_assignment_role_id', 'role', ['role_id'], ['id']
)

# TODO(stephenfin): Remove these procedures in a future contract migration
@ -1064,9 +1006,7 @@ def upgrade():
# FIXME(stephenfin): This should be dropped when we add the FK
# constraint to this column
op.create_index(
'registered_limit_id',
'limit',
['registered_limit_id'],
'registered_limit_id', 'limit', ['registered_limit_id']
)

# FIXME(stephenfin): These are leftover from when we removed a FK
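The bulk of these migration hunks are pure ruff-format work: any call that fits in the line limit is joined onto one line. Because the exploded originals carried trailing commas, the collapse suggests the formatter runs with the magic trailing comma disabled (ruff-format's `skip-magic-trailing-comma` option); with black's default behaviour those commas would have pinned the multi-line layout. A standalone sketch, assuming SQLAlchemy is installed:

import sqlalchemy as sql

# Both spellings build an identical column object; only the layout
# differs, so the formatter's join is behaviour-preserving.
exploded = sql.Column(
    'redelegated_trust_id',
    sql.String(64),
    nullable=True,
)
joined = sql.Column('redelegated_trust_id', sql.String(64), nullable=True)
assert repr(exploded) == repr(joined)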
@ -28,7 +28,4 @@ depends_on = None

def upgrade():
with op.batch_alter_table('service_provider', schema=None) as batch_op:
batch_op.alter_column(
'relay_state_prefix',
server_default=None,
)
batch_op.alter_column('relay_state_prefix', server_default=None)
@ -36,11 +36,7 @@ EXPAND_BRANCH = 'expand'
DATA_MIGRATION_BRANCH = 'data_migration'
CONTRACT_BRANCH = 'contract'

RELEASES = (
'yoga',
'bobcat',
'2024.01',
)
RELEASES = ('yoga', 'bobcat', '2024.01')
MILESTONES = (
'yoga',
# Do not add the milestone until the end of the release
@ -48,9 +44,7 @@ MILESTONES = (
CURRENT_RELEASE = RELEASES[-1]
MIGRATION_BRANCHES = (EXPAND_BRANCH, CONTRACT_BRANCH)
VERSIONS_PATH = os.path.join(
os.path.dirname(sql.__file__),
'migrations',
'versions',
os.path.dirname(sql.__file__), 'migrations', 'versions'
)


@ -77,8 +71,7 @@ def _find_alembic_conf():
:returns: An instance of ``alembic.config.Config``
"""
path = os.path.join(
os.path.abspath(os.path.dirname(__file__)),
'alembic.ini',
os.path.abspath(os.path.dirname(__file__)), 'alembic.ini'
)

config = alembic_config.Config(os.path.abspath(path))
@ -66,9 +66,7 @@ check_password = password_hashing.check_password
# NOTE(hiromu): This dict defines alternative DN string for X.509. When
# retrieving DN from X.509, converting attribute types that are not listed
# in the RFC4514 to a corresponding alternative DN string.
ATTR_NAME_OVERRIDES = {
x509.NameOID.EMAIL_ADDRESS: "emailAddress",
}
ATTR_NAME_OVERRIDES = {x509.NameOID.EMAIL_ADDRESS: "emailAddress"}


def resource_uuid(value):
@ -217,7 +215,7 @@ def get_unix_user(user=None):
try:
i = int(user)
except ValueError:
raise KeyError("user name '%s' not found" % user)
raise KeyError(f"user name '{user}' not found")
try:
user_info = pwd.getpwuid(i)
except KeyError:
@ -232,8 +230,7 @@ def get_unix_user(user=None):
else:
user_cls_name = reflection.get_class_name(user, fully_qualified=False)
raise TypeError(
'user must be string, int or None; not %s (%r)'
% (user_cls_name, user)
f'user must be string, int or None; not {user_cls_name} ({user!r})'
)

return user_info.pw_uid, user_info.pw_name
@ -278,7 +275,7 @@ def get_unix_group(group=None):
try:
i = int(group)
except ValueError:
raise KeyError("group name '%s' not found" % group)
raise KeyError(f"group name '{group}' not found")
try:
group_info = grp.getgrgid(i)
except KeyError:
@ -295,15 +292,13 @@ def get_unix_group(group=None):
group, fully_qualified=False
)
raise TypeError(
'group must be string, int or None; not %s (%r)'
% (group_cls_name, group)
f'group must be string, int or None; not {group_cls_name} ({group!r})'
)

return group_info.gr_gid, group_info.gr_name


class WhiteListedItemFilter:

def __init__(self, whitelist, data):
self._whitelist = set(whitelist or [])
self._data = data
@ -105,10 +105,7 @@ def set_default_for_default_log_levels():
This function needs to be called before CONF().

"""
extra_log_level_defaults = [
'dogpile=INFO',
'routes=INFO',
]
extra_log_level_defaults = ['dogpile=INFO', 'routes=INFO']

log.register_options(CONF)
log.set_defaults(
@ -63,12 +63,7 @@ keystone database or open keystone to a DoS attack.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
caching,
cache_time,
user_limit,
]
ALL_OPTS = [driver, caching, cache_time, user_limit]


def register_opts(conf):
@ -75,13 +75,7 @@ have enough services or endpoints to exceed a reasonable limit.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
template_file,
driver,
caching,
cache_time,
list_limit,
]
ALL_OPTS = [template_file, driver, caching, cache_time, list_limit]


def register_opts(conf):
@ -16,7 +16,6 @@ package.

"""


_DEFAULT_AUTH_METHODS = [
'external',
'password',
@ -42,10 +42,7 @@ catalog. If set to false, keystone will return an empty service catalog.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
return_all_endpoints_if_no_filter,
]
ALL_OPTS = [driver, return_all_endpoints_if_no_filter]


def register_opts(conf):
@ -28,9 +28,7 @@ to set this unless you are providing a custom entry point.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
]
ALL_OPTS = [driver]


def register_opts(conf):
@ -62,10 +62,7 @@ this value means that additional secondary keys will be kept in the rotation.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
key_repository,
max_active_keys,
]
ALL_OPTS = [key_repository, max_active_keys]


def register_opts(conf):
@ -60,10 +60,7 @@ this value means that additional secondary keys will be kept in the rotation.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
key_repository,
max_active_keys,
]
ALL_OPTS = [key_repository, max_active_keys]


def register_opts(conf):
@ -67,11 +67,7 @@ recommended value.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
generator,
backward_compatible_ids,
]
ALL_OPTS = [driver, generator, backward_compatible_ids]


def register_opts(conf):
@ -55,11 +55,7 @@ means that access tokens will last forever.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
request_token_duration,
access_token_duration,
]
ALL_OPTS = [driver, request_token_duration, access_token_duration]


def register_opts(conf):
@ -47,7 +47,7 @@ def list_opts():

def _tupleize(d):
"""Convert a dict of options to the 2-tuple format."""
return [(key, value) for key, value in d.items()]
return list(d.items())


def _list_module_names():
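The `_tupleize` change is flake8-comprehensions rule C416 (unnecessary comprehension): a comprehension that only repackages each `(key, value)` pair can be `list(d.items())` directly. It is classed as an unsafe fix because it assumes the iterable really yields 2-tuples, which is guaranteed for dict views:

d = {'driver': 'sql', 'list_limit': None}  # illustrative options dict
assert [(key, value) for key, value in d.items()] == list(d.items())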
@ -69,9 +69,8 @@ def _import_modules(module_names):
module = importlib.import_module(full_module_path)
if not hasattr(module, LIST_OPTS_FUNC_NAME):
raise Exception(
"The module '%s' should have a '%s' function which "
f"The module '{full_module_path}' should have a '{LIST_OPTS_FUNC_NAME}' function which "
"returns the config options."
% (full_module_path, LIST_OPTS_FUNC_NAME)
)
else:
imported_modules.append(module)
@ -38,10 +38,7 @@ Maximum number of entities that will be returned in a policy collection.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
list_limit,
]
ALL_OPTS = [driver, list_limit]


def register_opts(conf):
@ -83,13 +83,7 @@ has no effect unless global caching and receipt caching are enabled.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
expiration,
provider,
caching,
cache_time,
cache_on_issue,
]
ALL_OPTS = [expiration, provider, caching, cache_time, cache_on_issue]


def register_opts(conf):
@ -65,12 +65,7 @@ has no effect unless global and `[revoke] caching` are both enabled.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
expiration_buffer,
caching,
cache_time,
]
ALL_OPTS = [driver, expiration_buffer, caching, cache_time]


def register_opts(conf):
@ -61,12 +61,7 @@ deployment.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
caching,
cache_time,
list_limit,
]
ALL_OPTS = [driver, caching, cache_time, list_limit]


def register_opts(conf):
@ -30,9 +30,7 @@ this option unless you are providing a custom entry point.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
]
ALL_OPTS = [driver]


def register_opts(conf):
@ -61,11 +61,7 @@ this value.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
trusted_issuer,
protocol,
issuer_attribute,
]
ALL_OPTS = [trusted_issuer, protocol, issuer_attribute]


def register_opts(conf):
@ -28,9 +28,7 @@ The number of previous windows to check when processing TOTP passcodes.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
included_previous_windows,
]
ALL_OPTS = [included_previous_windows]


def register_opts(conf):
@ -52,11 +52,7 @@ unless you are providing a custom entry point.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
allow_redelegation,
max_redelegation_count,
driver,
]
ALL_OPTS = [allow_redelegation, max_redelegation_count, driver]


def register_opts(conf):
@ -74,13 +74,7 @@ running deployment.


GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
driver,
caching,
cache_time,
list_limit,
enforcement_model,
]
ALL_OPTS = [driver, caching, cache_time, list_limit, enforcement_model]


def register_opts(conf):
@ -39,9 +39,7 @@ SENSITIVE/PRIVILEGED DATA.
)

GROUP_NAME = __name__.split('.')[-1]
ALL_OPTS = [
debug_middleware,
]
ALL_OPTS = [debug_middleware]


def register_opts(conf):
@ -54,7 +54,6 @@ class CredentialModel(sql.ModelBase, sql.ModelDictMixinWithExtras):


class Credential(base.CredentialDriverBase):

# credential crud

@sql.handle_conflicts(conflict_type='credential')
@ -50,7 +50,7 @@ class Manager(manager.Manager):
if credential['type'] == 'ec2':
decrypted_blob = json.loads(
PROVIDERS.credential_provider_api.decrypt(
credential['encrypted_blob'],
credential['encrypted_blob']
)
)
else:
@ -17,7 +17,6 @@ CONF = keystone.conf.CONF


class Manager(manager.Manager):

driver_namespace = 'keystone.credential.provider'
_provides_api = 'credential_provider_api'

@ -85,7 +85,7 @@ class Provider(core.Provider):
primary_key_hash(keys),
)
except (TypeError, ValueError) as e:
msg = 'Credential could not be encrypted: %s' % str(e)
msg = f'Credential could not be encrypted: {str(e)}'
tr_msg = _('Credential could not be encrypted: %s') % str(e)
LOG.error(msg)
raise exception.CredentialEncryptionError(tr_msg)
@ -135,7 +135,7 @@ update_request_body: dict[str, Any] = {
"properties": _credential_properties,
"additionalProperties": True,
"minProperties": 1,
},
}
},
"required": ["credential"],
}
@ -50,7 +50,6 @@ class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin):


class EndpointPolicy(base.EndpointPolicyDriverBase):

def create_policy_association(
self, policy_id, endpoint_id=None, service_id=None, region_id=None
):
@ -97,7 +97,6 @@ class Manager(manager.Manager):
)

def list_endpoints_for_policy(self, policy_id):

def _get_endpoint(endpoint_id, policy_id):
try:
return PROVIDERS.catalog_api.get_endpoint(endpoint_id)
@ -235,7 +234,6 @@ class Manager(manager.Manager):
return matching_endpoints

def get_policy_for_endpoint(self, endpoint_id):

def _get_policy(policy_id, endpoint_id):
try:
return PROVIDERS.policy_api.get_policy(policy_id)
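The credential provider hunk shows the limit of the f-string conversion: `msg` is rewritten, but `tr_msg` keeps its %-template because the literal passes through `_()` first, so the fixer only sees `call % value`; gettext catalogs are keyed on the exact msgid anyway, so leaving it alone is also the right outcome. A sketch with a stand-in for keystone.i18n's `_`:

def _(msgid):  # stand-in for the oslo.i18n translation hook, illustration only
    return msgid

e = ValueError('bad key')
msg = f'Credential could not be encrypted: {str(e)}'
tr_msg = _('Credential could not be encrypted: %s') % str(e)
assert msg == tr_msg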