Merge "Re-join the strings after re-formatting"
Commit: f45921840c
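The change is almost entirely mechanical: an earlier re-formatting pass had split long string literals into adjacent fragments ('foo ' 'bar'), and this commit joins them back into single literals. Python concatenates adjacent string literals at parse time, so the joined form produces the identical string at runtime; only readability changes. A minimal sketch of the equivalence (illustrative only, not part of the commit):

    # Adjacent string literals are merged by the parser, so both spellings
    # yield the same string object.
    split = (
        '/domains/<string:domain_id>/users' '/<string:user_id>/roles'
    )
    joined = '/domains/<string:domain_id>/users/<string:user_id>/roles'
    assert split == joined

A few hunks do more than join fragments, for example restoring the comma that was missing between 'service_user_domain_id' and 'service_user_domain_name', and between 'TrUe' and 'FaLse' in a test list; those show up below as one removed line replaced by two added lines.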
@@ -538,16 +538,14 @@ class DomainAPI(ks_flask.APIBase):
  ),
  ks_flask.construct_resource_map(
  resource=DefaultConfigOptionResource,
- url=('/domains/config/<string:group>' '/<string:option>/default'),
+ url=('/domains/config/<string:group>/<string:option>/default'),
  resource_kwargs={},
  rel='domain_config_default_option',
  path_vars={'group': CONFIG_GROUP, 'option': CONFIG_OPTION},
  ),
  ks_flask.construct_resource_map(
  resource=DomainUserListResource,
- url=(
- '/domains/<string:domain_id>/users' '/<string:user_id>/roles'
- ),
+ url=('/domains/<string:domain_id>/users/<string:user_id>/roles'),
  resource_kwargs={},
  rel='domain_user_roles',
  path_vars={
@@ -571,9 +569,7 @@ class DomainAPI(ks_flask.APIBase):
  ),
  ks_flask.construct_resource_map(
  resource=DomainGroupListResource,
- url=(
- '/domains/<string:domain_id>/groups' '/<string:group_id>/roles'
- ),
+ url=('/domains/<string:domain_id>/groups/<string:group_id>/roles'),
  resource_kwargs={},
  rel='domain_group_roles',
  path_vars={
@@ -700,7 +700,7 @@ class UserAppCredListCreateResource(ks_flask.ResourceBase):
  _check_unrestricted_application_credential(token)
  if self.oslo_context.user_id != user_id:
  action = _(
- 'Cannot create an application credential for another ' 'user.'
+ 'Cannot create an application credential for another user.'
  )
  raise ks_exception.ForbiddenAction(action=action)
  project_id = self.oslo_context.project_id
@@ -168,7 +168,7 @@ class AuthInfo(provider_api.ProviderAPIMixin):
  ):
  msg = 'Domain name cannot contain reserved characters.'
  tr_msg = _(
- 'Domain name cannot contain reserved ' 'characters.'
+ 'Domain name cannot contain reserved characters.'
  )
  LOG.warning(msg)
  raise exception.Unauthorized(message=tr_msg)
@@ -194,7 +194,7 @@ class AuthInfo(provider_api.ProviderAPIMixin):
  ):
  msg = 'Project name cannot contain reserved characters.'
  tr_msg = _(
- 'Project name cannot contain reserved ' 'characters.'
+ 'Project name cannot contain reserved characters.'
  )
  LOG.warning(msg)
  raise exception.Unauthorized(message=tr_msg)
@@ -302,7 +302,7 @@ class Bootstrapper:
  self.admin_user_id, self.admin_role_id
  )
  LOG.info(
- 'Granted role %(role)s on the system to user' ' %(username)s.',
+ 'Granted role %(role)s on the system to user %(username)s.',
  {
  'role': self.admin_role_name,
  'username': self.admin_username,
@@ -277,12 +277,12 @@ class ProjectSetup(BaseApp):
  '--project-name',
  default=None,
  required=True,
- help='The name of the keystone project being' ' created.',
+ help='The name of the keystone project being created.',
  )
  parser.add_argument(
  '--project-id',
  default=None,
- help='The UUID of the keystone project being' ' created.',
+ help='The UUID of the keystone project being created.',
  )
  return parser
@@ -313,7 +313,7 @@ class UserSetup(BaseApp):
  '--username',
  default=None,
  required=True,
- help='The username of the keystone user that' ' is being created.',
+ help='The username of the keystone user that is being created.',
  )
  parser.add_argument(
  '--user-password-plain',
@@ -325,7 +325,7 @@ class UserSetup(BaseApp):
  parser.add_argument(
  '--user-id',
  default=None,
- help='The UUID of the keystone user being ' 'created.',
+ help='The UUID of the keystone user being created.',
  )
  return parser
@@ -987,7 +987,7 @@ class TrustFlush(BaseApp):
  )
  except KeyError:
  raise ValueError(
- "'%s'Invalid input for date, should be " "DD-MM-YYYY",
+ "'%s'Invalid input for date, should be DD-MM-YYYY",
  CONF.command.date,
  )
  else:
@@ -1021,23 +1021,23 @@ class MappingPurge(BaseApp):
  parser.add_argument(
  '--domain-name',
  default=None,
- help=('Purge any mappings for the domain ' 'specified.'),
+ help=('Purge any mappings for the domain specified.'),
  )
  parser.add_argument(
  '--public-id',
  default=None,
- help=('Purge the mapping for the Public ID ' 'specified.'),
+ help=('Purge the mapping for the Public ID specified.'),
  )
  parser.add_argument(
  '--local-id',
  default=None,
- help=('Purge the mappings for the Local ID ' 'specified.'),
+ help=('Purge the mappings for the Local ID specified.'),
  )
  parser.add_argument(
  '--type',
  default=None,
  choices=['user', 'group'],
- help=('Purge any mappings for the type ' 'specified.'),
+ help=('Purge any mappings for the type specified.'),
  )
  return parser
@@ -1067,7 +1067,7 @@ class MappingPurge(BaseApp):
  or CONF.command.type is not None
  ):
  raise ValueError(
- _('--all option cannot be mixed with ' 'other options')
+ _('--all option cannot be mixed with other options')
  )

  def get_domain_id(name):
@@ -1132,7 +1132,7 @@ def _domain_config_finder(conf_dir):
  continue

  LOG.warning(
- 'Ignoring file (%s) while scanning ' 'domain config directory',
+ 'Ignoring file (%s) while scanning domain config directory',
  fname,
  )
@@ -1381,7 +1381,7 @@ class MappingEngineTester(BaseApp):
  self.rules = jsonutils.load(file)
  except ValueError as e:
  raise SystemExit(
- _('Error while parsing rules ' '%(path)s: %(err)s')
+ _('Error while parsing rules %(path)s: %(err)s')
  % {'path': path, 'err': e}
  )
@@ -1392,7 +1392,7 @@ class MappingEngineTester(BaseApp):
  self.assertion = file.read().strip()
  except OSError as e:
  raise SystemExit(
- _("Error while opening file " "%(path)s: %(err)s")
+ _("Error while opening file %(path)s: %(err)s")
  % {'path': path, 'err': e}
  )
@@ -1540,7 +1540,7 @@ class MappingEngineTester(BaseApp):
  '--engine-debug',
  default=False,
  action="store_true",
- help=("Enable debug messages from the mapping " "engine."),
+ help=("Enable debug messages from the mapping engine."),
  )
  parser.add_argument(
  '--mapping-schema-version',
@@ -225,7 +225,7 @@ class FernetUtils:
  key_files, _ = self._get_key_files(self.key_repository)

  LOG.info(
- 'Starting key rotation with %(count)s key files: ' '%(list)s',
+ 'Starting key rotation with %(count)s key files: %(list)s',
  {'count': len(key_files), 'list': list(key_files.values())},
  )
@@ -20,11 +20,11 @@ collection_path = '/v3/users/{user_id}/access_rules'
  resource_path = collection_path + '/{access_rule_id}'

  SYSTEM_READER_OR_OWNER = (
- '(' + base.SYSTEM_READER + ') or ' 'user_id:%(target.user.id)s'
+ '(' + base.SYSTEM_READER + ') or user_id:%(target.user.id)s'
  )

  SYSTEM_ADMIN_OR_OWNER = (
- '(' + base.SYSTEM_ADMIN + ') or ' 'user_id:%(target.user.id)s'
+ '(' + base.SYSTEM_ADMIN + ') or user_id:%(target.user.id)s'
  )

  access_rule_policies = [
@@ -28,7 +28,7 @@ auth_policies = [
  name=base.IDENTITY % 'get_auth_projects',
  check_str='',
  description=(
- 'List all projects a user has access to via role ' 'assignments.'
+ 'List all projects a user has access to via role assignments.'
  ),
  operations=[
  {'path': '/v3/auth/projects', 'method': 'GET'},
@@ -39,7 +39,7 @@ auth_policies = [
  name=base.IDENTITY % 'get_auth_domains',
  check_str='',
  description=(
- 'List all domains a user has access to via role ' 'assignments.'
+ 'List all domains a user has access to via role assignments.'
  ),
  operations=[
  {'path': '/v3/auth/domains', 'method': 'GET'},
@@ -21,10 +21,10 @@ RULE_ADMIN_OR_CREDENTIAL_OWNER = (
  '(rule:owner and user_id:%(target.credential.user_id)s)'
  )
  RULE_ADMIN_OR_TARGET_DOMAIN = (
- 'rule:admin_required or ' 'token.project.domain.id:%(target.domain.id)s'
+ 'rule:admin_required or token.project.domain.id:%(target.domain.id)s'
  )
  RULE_ADMIN_OR_TARGET_PROJECT = (
- 'rule:admin_required or ' 'project_id:%(target.project.id)s'
+ 'rule:admin_required or project_id:%(target.project.id)s'
  )
  RULE_ADMIN_OR_TOKEN_SUBJECT = 'rule:admin_or_token_subject'  # nosec
  RULE_REVOKE_EVENT_OR_ADMIN = 'rule:revoke_event_or_admin'
@@ -54,7 +54,7 @@ ec2_credential_policies = [
  operations=[
  {
  'path': (
- '/v3/users/{user_id}/credentials/OS-EC2/' '{credential_id}'
+ '/v3/users/{user_id}/credentials/OS-EC2/{credential_id}'
  ),
  'method': 'GET',
  }
@@ -92,7 +92,7 @@ ec2_credential_policies = [
  operations=[
  {
  'path': (
- '/v3/users/{user_id}/credentials/OS-EC2/' '{credential_id}'
+ '/v3/users/{user_id}/credentials/OS-EC2/{credential_id}'
  ),
  'method': 'DELETE',
  }
@@ -126,13 +126,13 @@ group_endpoint_policies = [
  operations=[
  {
  'path': (
- '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}'
+ '/v3/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
  ),
  'method': 'GET',
  },
  {
  'path': (
- '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}'
+ '/v3/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
  ),
  'method': 'HEAD',
  },
@@ -147,7 +147,7 @@ group_endpoint_policies = [
  operations=[
  {
  'path': (
- '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}'
+ '/v3/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
  ),
  'method': 'PATCH',
  }
@@ -162,7 +162,7 @@ group_endpoint_policies = [
  operations=[
  {
  'path': (
- '/v3/OS-EP-FILTER/endpoint_groups/' '{endpoint_group_id}'
+ '/v3/OS-EP-FILTER/endpoint_groups/{endpoint_group_id}'
  ),
  'method': 'DELETE',
  }
@@ -174,7 +174,7 @@ group_endpoint_policies = [
  check_str=base.RULE_ADMIN_OR_SYSTEM_READER,
  scope_types=['system', 'project'],
  description=(
- 'List all projects associated with a specific endpoint ' 'group.'
+ 'List all projects associated with a specific endpoint group.'
  ),
  operations=[
  {
@@ -208,7 +208,7 @@ group_endpoint_policies = [
  check_str=base.RULE_ADMIN_OR_SYSTEM_READER,
  scope_types=['system', 'project'],
  description=(
- 'Check if an endpoint group is associated with a ' 'project.'
+ 'Check if an endpoint group is associated with a project.'
  ),
  operations=[
  {
@@ -236,7 +236,7 @@ group_endpoint_policies = [
  operations=[
  {
  'path': (
- '/v3/OS-EP-FILTER/projects/{project_id}/' 'endpoint_groups'
+ '/v3/OS-EP-FILTER/projects/{project_id}/endpoint_groups'
  ),
  'method': 'GET',
  }
@@ -281,13 +281,13 @@ policy_association_policies = [
  operations=[
  {
  'path': (
- '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' 'policy'
+ '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/policy'
  ),
  'method': 'GET',
  },
  {
  'path': (
- '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/' 'policy'
+ '/v3/endpoints/{endpoint_id}/OS-ENDPOINT-POLICY/policy'
  ),
  'method': 'HEAD',
  },
@@ -302,7 +302,7 @@ policy_association_policies = [
  operations=[
  {
  'path': (
- '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/' 'endpoints'
+ '/v3/policies/{policy_id}/OS-ENDPOINT-POLICY/endpoints'
  ),
  'method': 'GET',
  }
@@ -67,9 +67,7 @@ project_endpoint_policies = [
  description='List projects allowed to access an endpoint.',
  operations=[
  {
- 'path': (
- '/v3/OS-EP-FILTER/endpoints/{endpoint_id}/' 'projects'
- ),
+ 'path': ('/v3/OS-EP-FILTER/endpoints/{endpoint_id}/projects'),
  'method': 'GET',
  }
  ],
@@ -121,9 +119,7 @@ project_endpoint_policies = [
  description='List the endpoints a project is allowed to access.',
  operations=[
  {
- 'path': (
- '/v3/OS-EP-FILTER/projects/{project_id}/' 'endpoints'
- ),
+ 'path': ('/v3/OS-EP-FILTER/projects/{project_id}/endpoints'),
  'method': 'GET',
  }
  ],
@@ -101,7 +101,7 @@ user_policies = [
  # system and has a valid token, they should be able to generate a list
  # of projects they have access to.
  description=(
- 'List all projects a user has access to via role ' 'assignments.'
+ 'List all projects a user has access to via role assignments.'
  ),
  operations=[{'path': ' /v3/auth/projects', 'method': 'GET'}],
  ),
@@ -114,7 +114,7 @@ user_policies = [
  # system and has a valid token, they should be able to generate a list
  # of domains they have access to.
  description=(
- 'List all domains a user has access to via role ' 'assignments.'
+ 'List all domains a user has access to via role assignments.'
  ),
  operations=[{'path': '/v3/auth/domains', 'method': 'GET'}],
  ),
@@ -165,7 +165,7 @@ class ResourceOptionRegistry:

  if option.option_id in self._registered_options:
  raise ValueError(
- _('Option %(option_id)s already defined in ' '%(registry)s.')
+ _('Option %(option_id)s already defined in %(registry)s.')
  % {
  'option_id': option.option_id,
  'registry': self._registry_type,
@@ -173,7 +173,7 @@ class ResourceOptionRegistry:
  )
  if option.option_name in self.option_names:
  raise ValueError(
- _('Option %(option_name)s already defined in ' '%(registry)s')
+ _('Option %(option_name)s already defined in %(registry)s')
  % {
  'option_name': option.option_name,
  'registry': self._registry_type,
@@ -223,12 +223,12 @@ class ResourceOption:
  )
  elif len(option_id) != 4:
  raise ValueError(
- _('`option_id` must be 4 characters in ' 'length. Got %r')
+ _('`option_id` must be 4 characters in length. Got %r')
  % option_id
  )
  if not isinstance(option_name, str):
  raise TypeError(
- _('`option_name` must be a string. ' 'Got %r') % option_name
+ _('`option_name` must be a string. Got %r') % option_name
  )

  self._option_id = option_id
@@ -574,7 +574,7 @@ def handle_conflicts(conflict_type='object'):
  ) % {'field': field, 'name': name, 'domain_id': domain_id}
  elif name:
  msg = _(
- 'Duplicate entry found with %(field)s ' '%(name)s'
+ 'Duplicate entry found with %(field)s %(name)s'
  ) % {'field': field, 'name': name}
  elif domain_id:
  msg = _('Duplicate entry at domain ID %s') % domain_id
@@ -47,7 +47,7 @@ def validate_password(password):
  )
  LOG.error(msg, pattern)
  detail = _(
- "Unable to validate password due to invalid " "configuration"
+ "Unable to validate password due to invalid configuration"
  )
  raise exception.PasswordValidationError(detail=detail)
@@ -88,7 +88,7 @@ class SchemaValidator:
  # fails validation.
  path = '/'.join(map(str, ex.path))
  detail = _(
- "Invalid input for field '%(path)s': " "%(message)s"
+ "Invalid input for field '%(path)s': %(message)s"
  ) % {'path': path, 'message': str(ex)}
  else:
  detail = str(ex)
@@ -314,7 +314,7 @@ class Manager(manager.Manager):
  # No policy is associated with endpoint, handled below.
  pass

- msg = _(
- 'No policy is associated with endpoint ' '%(endpoint_id)s.'
- ) % {'endpoint_id': endpoint_id}
+ msg = _('No policy is associated with endpoint %(endpoint_id)s.') % {
+ 'endpoint_id': endpoint_id
+ }
  raise exception.NotFound(msg)
@@ -107,7 +107,7 @@ class ValidationError(Error):

  class URLValidationError(ValidationError):
  message_format = _(
- "Cannot create an endpoint with an invalid URL:" " %(url)s."
+ "Cannot create an endpoint with an invalid URL: %(url)s."
  )
@@ -117,7 +117,7 @@ class PasswordValidationError(ValidationError):

  class PasswordRequirementsValidationError(PasswordValidationError):
  message_format = _(
- "The password does not match the requirements:" " %(detail)s."
+ "The password does not match the requirements: %(detail)s."
  )
@@ -385,7 +385,7 @@ class AdditionalAuthRequired(AuthPluginException):

  class Forbidden(SecurityError):
  message_format = _(
- "You are not authorized to perform the" " requested action."
+ "You are not authorized to perform the requested action."
  )
  code = int(http.client.FORBIDDEN)
  title = http.client.responses[http.client.FORBIDDEN]
@@ -618,7 +618,7 @@ class AccessRuleNotFound(NotFound):

  class Conflict(Error):
  message_format = _(
- "Conflict occurred attempting to store %(type)s -" " %(details)s."
+ "Conflict occurred attempting to store %(type)s - %(details)s."
  )
  code = int(http.client.CONFLICT)
  title = http.client.responses[http.client.CONFLICT]
@@ -654,13 +654,13 @@ class UnexpectedError(SecurityError):

  class TrustConsumeMaximumAttempt(UnexpectedError):
  debug_message_format = _(
- "Unable to consume trust %(trust_id)s. Unable to " "acquire lock."
+ "Unable to consume trust %(trust_id)s. Unable to acquire lock."
  )

  class MalformedEndpoint(UnexpectedError):
  debug_message_format = _(
- "Malformed endpoint URL (%(endpoint)s)," " see ERROR log for details."
+ "Malformed endpoint URL (%(endpoint)s), see ERROR log for details."
  )
@@ -693,7 +693,7 @@ class AssignmentTypeCalculationError(UnexpectedError):

  class NotImplemented(Error):
  message_format = _(
- "The action you have requested has not" " been implemented."
+ "The action you have requested has not been implemented."
  )
  code = int(http.client.NOT_IMPLEMENTED)
  title = http.client.responses[http.client.NOT_IMPLEMENTED]
@@ -742,9 +742,7 @@ class MigrationNotProvided(Exception):

  class UnsupportedTokenVersionException(UnexpectedError):
- debug_message_format = _(
- 'Token version is unrecognizable or ' 'unsupported.'
- )
+ debug_message_format = _('Token version is unrecognizable or unsupported.')

  class SAMLSigningError(UnexpectedError):
@@ -782,7 +780,7 @@ class CredentialEncryptionError(Exception):

  class LDAPServerConnectionError(UnexpectedError):
  debug_message_format = _(
- 'Unable to establish a connection to ' 'LDAP Server (%(url)s).'
+ 'Unable to establish a connection to LDAP Server (%(url)s).'
  )
@@ -339,9 +339,7 @@ def validate_idp(idp, protocol, assertion):
  try:
  idp_remote_identifier = assertion[remote_id_parameter]
  except KeyError:
- msg = _(
- 'Could not find Identity Provider identifier in ' 'environment'
- )
+ msg = _('Could not find Identity Provider identifier in environment')
  raise exception.ValidationError(msg)
  if idp_remote_identifier not in idp['remote_ids']:
  msg = _(
@@ -79,7 +79,7 @@ def utf8_encode(value):
  value_cls_name = reflection.get_class_name(
  value, fully_qualified=False
  )
- raise TypeError("value must be basestring, " "not %s" % value_cls_name)
+ raise TypeError("value must be basestring, not %s" % value_cls_name)

  _utf8_decoder = codecs.getdecoder('utf-8')
@@ -692,7 +692,7 @@ def _common_ldap_initialization(
  if use_tls or using_ldaps:
  if not ldap.TLS_AVAIL:
  raise ValueError(
- _('Invalid LDAP TLS_AVAIL option: %s. TLS ' 'not available')
+ _('Invalid LDAP TLS_AVAIL option: %s. TLS not available')
  % ldap.TLS_AVAIL
  )
  if not tls_cacertfile and not tls_cacertdir:
@@ -713,7 +713,7 @@ def _common_ldap_initialization(
  # connection
  if not os.path.isfile(tls_cacertfile):
  raise OSError(
- _("tls_cacertfile %s not found " "or is not a file")
+ _("tls_cacertfile %s not found or is not a file")
  % tls_cacertfile
  )
  ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
@@ -726,7 +726,7 @@ def _common_ldap_initialization(
  # connection
  if not os.path.isdir(tls_cacertdir):
  raise OSError(
- _("tls_cacertdir %s not found " "or is not a directory")
+ _("tls_cacertdir %s not found or is not a directory")
  % tls_cacertdir
  )
  ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
@@ -1620,7 +1620,7 @@ class BaseLdap:
  id_attrs = lower_res.get(self.id_attr.lower())
  if not id_attrs:
  message = _(
- 'ID attribute %(id_attr)s not found in LDAP ' 'object %(dn)s'
+ 'ID attribute %(id_attr)s not found in LDAP object %(dn)s'
  ) % ({'id_attr': self.id_attr, 'dn': res[0]})
  raise exception.NotFound(message=message)
  if len(id_attrs) > 1:
@@ -150,7 +150,7 @@ class Identity(base.IdentityDriverBase):
  break
  else:
  raise exception.NotFound(
- _("User '%(user_id)s' not found in" " group '%(group_id)s'")
+ _("User '%(user_id)s' not found in group '%(group_id)s'")
  % {'user_id': user_id, 'group_id': group_id}
  )
@@ -337,7 +337,7 @@ class Identity(base.IdentityDriverBase):
  return

  raise exception.NotFound(
- _("User '%(user_id)s' not found in" " group '%(group_id)s'")
+ _("User '%(user_id)s' not found in group '%(group_id)s'")
  % {'user_id': user_id, 'group_id': group_id}
  )
@@ -1732,7 +1732,7 @@ class Manager(manager.Manager):

  if email:
  LOG.debug(
- "Executing the e-mail update for federated user " "[%s].",
+ "Executing the e-mail update for federated user [%s].",
  user,
  )
@@ -481,10 +481,10 @@ class TokenModel:
  raise exception.ProjectNotFound(tr_msg)
  if self.project and not self.project_domain.get('enabled'):
  msg = (
- 'Unable to validate token because domain %(id)s is ' 'disabled'
+ 'Unable to validate token because domain %(id)s is disabled'
  ) % {'id': self.project_domain['id']}
  tr_msg = _(
- 'Unable to validate token because domain %(id)s is ' 'disabled'
+ 'Unable to validate token because domain %(id)s is disabled'
  ) % {'id': self.project_domain['id']}
  LOG.warning(msg)
  raise exception.DomainNotFound(tr_msg)
@@ -513,10 +513,10 @@ class TokenModel:

  if not self.user_domain.get('enabled'):
  msg = (
- 'Unable to validate token because domain %(id)s is ' 'disabled'
+ 'Unable to validate token because domain %(id)s is disabled'
  ) % {'id': self.user_domain['id']}
  tr_msg = _(
- 'Unable to validate token because domain %(id)s is ' 'disabled'
+ 'Unable to validate token because domain %(id)s is disabled'
  ) % {'id': self.user_domain['id']}
  LOG.warning(msg)
  raise exception.DomainNotFound(tr_msg)
@@ -309,7 +309,7 @@ class Resource(base.ResourceDriverBase):
  or project_id == base.NULL_DOMAIN_ID
  ):
  LOG.warning(
- 'Project %s does not exist and was not ' 'deleted.',
+ 'Project %s does not exist and was not deleted.',
  project_id,
  )
  query.delete(synchronize_session=False)
@@ -107,9 +107,7 @@ class Manager(manager.Manager):

  if project_ref['parent_id']:
  raise exception.ValidationError(
- message=_(
- 'only root projects are allowed to act as ' 'domains.'
- )
+ message=_('only root projects are allowed to act as domains.')
  )

  def _assert_regular_project_constraints(self, project_ref):
@@ -1020,7 +1020,7 @@ class ResourceBase(flask_restful.Resource):
  else:
  msg = 'No domain information specified as part of list request'
  tr_msg = _(
- 'No domain information specified as part of list ' 'request'
+ 'No domain information specified as part of list request'
  )
  LOG.warning(msg)
  raise exception.Unauthorized(tr_msg)
@@ -484,7 +484,8 @@ class AuthContextMiddleware(
  'service_user_name',
  'service_project_id',
  'service_project_name',
- 'service_user_domain_id' 'service_user_domain_name',
+ 'service_user_domain_id',
+ 'service_user_domain_name',
  'service_project_domain_id',
  'service_project_domain_name',
  'service_roles',
@@ -21,7 +21,7 @@ class FormatUrlTests(unit.BaseTestCase):

  def test_successful_formatting(self):
  url_template = (
- 'http://server:9090/' '$(tenant_id)s/$(user_id)s/$(project_id)s'
+ 'http://server:9090/$(tenant_id)s/$(user_id)s/$(project_id)s'
  )
  project_id = uuid.uuid4().hex
  values = {'tenant_id': 'A', 'user_id': 'B', 'project_id': project_id}
@@ -68,7 +68,7 @@ class FormatUrlTests(unit.BaseTestCase):
  # list then MalformedEndpoint is raised.
  # For example, admin_token isn't allowed.
  url_template = (
- 'http://server:9090/' '$(project_id)s/$(user_id)s/$(admin_token)s'
+ 'http://server:9090/$(project_id)s/$(user_id)s/$(admin_token)s'
  )
  values = {'user_id': 'B', 'admin_token': 'C'}
  self.assertRaises(
@@ -82,7 +82,7 @@ class FormatUrlTests(unit.BaseTestCase):
  # This is intentional behavior since we don't want to skip
  # all the later endpoints once there is an URL of endpoint
  # trying to replace 'tenant_id' with None.
- url_template = 'http://server:9090/' '$(tenant_id)s/$(user_id)s'
+ url_template = 'http://server:9090/$(tenant_id)s/$(user_id)s'
  values = {'user_id': 'B'}
  self.assertIsNone(
  utils.format_url(
@@ -97,7 +97,7 @@ class FormatUrlTests(unit.BaseTestCase):
  # This is intentional behavior since we don't want to skip
  # all the later endpoints once there is an URL of endpoint
  # trying to replace 'project_id' with None.
- url_template = 'http://server:9090/' '$(project_id)s/$(user_id)s'
+ url_template = 'http://server:9090/$(project_id)s/$(user_id)s'
  values = {'user_id': 'B'}
  self.assertIsNone(
  utils.format_url(
@@ -465,7 +465,7 @@ class FakeLdap(common.LDAPHandler):
  else:
  LOG.debug('modify item failed: unknown command %s', cmd)
  raise NotImplementedError(
- 'modify_s action %s not' ' implemented' % cmd
+ 'modify_s action %s not implemented' % cmd
  )
  self.db[key] = entry
  self.db.sync()
@@ -492,7 +492,7 @@ class FakeLdap(common.LDAPHandler):

  if (not filterstr) and (scope != ldap.SCOPE_BASE):
  raise AssertionError(
- 'Search without filter on onelevel or ' 'subtree scope'
+ 'Search without filter on onelevel or subtree scope'
  )

  if scope == ldap.SCOPE_BASE:
@@ -558,7 +558,8 @@ class CommonLdapTestCase(unit.BaseTestCase):
  'false',
  'True',
  'False',
- 'TrUe' 'FaLse',
+ 'TrUe',
+ 'FaLse',
  ]
  for user_name in boolean_strings:
  user_id = uuid.uuid4().hex
@@ -25,7 +25,7 @@ from keystone.tests.unit.ksfixtures import database

  PROVIDERS = provider_api.ProviderAPIs
  BROKEN_WRITE_FUNCTIONALITY_MSG = (
- "Templated backend doesn't correctly " "implement write operations"
+ "Templated backend doesn't correctly implement write operations"
  )
@@ -866,7 +866,7 @@ class TestDomainConfigFinder(unit.BaseTestCase):
  )

  expected_msg_template = (
- 'Ignoring file (%s) while scanning ' 'domain config directory'
+ 'Ignoring file (%s) while scanning domain config directory'
  )
  self.assertThat(
  self.logging.output,
@@ -29,7 +29,7 @@ class BaseStyleCheck(unit.BaseTestCase):
  def get_checker(self):
  """Return the checker to be used for tests in this class."""
  raise NotImplementedError(
- 'subclasses must provide ' 'a real implementation'
+ 'subclasses must provide a real implementation'
  )

  def get_fixture(self):
@@ -2915,7 +2915,7 @@ class AssignmentInheritanceTestCase(
  )
  self.assertEqual(error_msg, r.result['error']['message'])
  r = self.get(
- '/role_assignments?scope.project.id&' 'include_subtree=True',
+ '/role_assignments?scope.project.id&include_subtree=True',
  expected_status=http.client.BAD_REQUEST,
  )
  self.assertEqual(error_msg, r.result['error']['message'])
@@ -3117,7 +3117,7 @@ class AssignmentInheritanceTestCase(

  # Get inherited role assignments
  collection_url = (
- '/role_assignments' '?scope.OS-INHERIT:inherited_to=projects'
+ '/role_assignments?scope.OS-INHERIT:inherited_to=projects'
  )
  r = self.get(collection_url)
  self.assertValidRoleAssignmentListResponse(
@@ -4187,7 +4187,7 @@ class GroupSystemRoleAssignmentTestCase(
  )
  self.put(member_url)
  member_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s'
+ '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s'
  ) % {
  'project_id': self.project_id,
  'group_id': group['id'],
@@ -4233,7 +4233,7 @@ class GroupSystemRoleAssignmentTestCase(
  )
  self.put(member_url)
  member_url = (
- '/projects/%(project_id)s/groups/%(group_id)s/' 'roles/%(role_id)s'
+ '/projects/%(project_id)s/groups/%(group_id)s/roles/%(role_id)s'
  ) % {
  'project_id': self.project_id,
  'group_id': group['id'],
@@ -4064,7 +4064,7 @@ class TestTokenRevokeById(test_v3.RestfulTestCase):
  def test_deleting_project_deletes_grants(self):
  # This is to make it a little bit more pretty with PEP8
  role_path = (
- '/projects/%(project_id)s/users/%(user_id)s/' 'roles/%(role_id)s'
+ '/projects/%(project_id)s/users/%(user_id)s/roles/%(role_id)s'
  )
  role_path = role_path % {
  'user_id': self.user['id'],
@@ -505,7 +505,7 @@ class CatalogTestCase(test_v3.RestfulTestCase):
  # interface, region_id and service_id specified
  ref = self._create_random_endpoint(interface='internal')
  response = self.get(
- ('/endpoints?interface=%s&region_id=%s' '&service_id=%s')
+ ('/endpoints?interface=%s&region_id=%s&service_id=%s')
  % (ref['interface'], ref['region_id'], ref['service_id'])
  )
  self.assertValidEndpointListResponse(response, ref=ref)
@@ -134,9 +134,9 @@ class EndpointPolicyTestCase(test_v3.RestfulTestCase):

  def test_list_endpoints_for_policy(self):
  """GET & HEAD /policies/%(policy_id}/endpoints."""
- url = '/policies/%(policy_id)s/OS-ENDPOINT-POLICY' '/endpoints' % {
- 'policy_id': self.policy['id']
- }
+ url = '/policies/{policy_id}/OS-ENDPOINT-POLICY/endpoints'.format(
+ policy_id=self.policy['id']
+ )
  self.put(url + '/' + self.endpoint['id'])
  r = self.get(url)
  self.assertValidEndpointListResponse(r, ref=self.endpoint)
@@ -234,7 +234,7 @@ class JsonHomeTests(test_v3.JsonHomeTestMixin):
  '/ext/OS-ENDPOINT-POLICY/1.0/rel'
  )
  PARAM_LOCATION = (
- 'https://docs.openstack.org/api/openstack-identity/3/' 'param'
+ 'https://docs.openstack.org/api/openstack-identity/3/param'
  )

  JSON_HOME_DATA = {
@@ -1677,7 +1677,7 @@ class FederatedIdentityProviderTests(test_v3.RestfulTestCase):
  deleted.

  """
- url = self.base_url(suffix='%(idp_id)s/' 'protocols/%(protocol_id)s')
+ url = self.base_url(suffix='%(idp_id)s/protocols/%(protocol_id)s')
  resp, idp_id, proto = self._assign_protocol_to_idp(
  expected_status=http.client.CREATED
  )
@@ -5297,7 +5297,5 @@ class K2KServiceCatalogTests(test_v3.RestfulTestCase):
  self.assertNotIn(
  'service_providers',
  token['token'],
- message=(
- 'Expected Service Catalog not to have ' 'service_providers'
- ),
+ message=('Expected Service Catalog not to have service_providers'),
  )
@@ -206,10 +206,10 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  APP_CRED_CREATE_URL = '/users/%(user_id)s/application_credentials'
  APP_CRED_LIST_URL = '/users/%(user_id)s/application_credentials'
  APP_CRED_DELETE_URL = (
- '/users/%(user_id)s/application_credentials/' '%(app_cred_id)s'
+ '/users/%(user_id)s/application_credentials/%(app_cred_id)s'
  )
  APP_CRED_SHOW_URL = (
- '/users/%(user_id)s/application_credentials/' '%(app_cred_id)s'
+ '/users/%(user_id)s/application_credentials/%(app_cred_id)s'
  )
  ACCESS_TOKEN_URL = '/OS-OAUTH2/token'
@@ -462,7 +462,9 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  app_cred = self._create_app_cred(self.user_id, client_name)
  data = {'grant_type': ''}
  error = 'unsupported_grant_type'
- error_description = 'The parameter grant_type ' ' is not supported.'
+ error_description = (
+ f'The parameter grant_type {data["grant_type"]} is not supported.'
+ )
  resp = self._get_access_token(
  app_cred,
  b64str=None,
@@ -483,8 +485,7 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  data = {'grant_type': 'not_client_credentials'}
  error = 'unsupported_grant_type'
  error_description = (
- 'The parameter grant_type '
- 'not_client_credentials is not supported.'
+ f'The parameter grant_type {data["grant_type"]} is not supported.'
  )
  resp = self._get_access_token(
  app_cred,
@@ -517,7 +518,7 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  data = {'grant_type': 'client_credentials'}
  data = parse.urlencode(data).encode()
  with mock.patch(
- 'keystone.api._shared.authentication.' 'authenticate_for_token'
+ 'keystone.api._shared.authentication.authenticate_for_token'
  ) as co_mock:
  co_mock.side_effect = exception.Unauthorized(
  'client is unauthorized'
@@ -557,7 +558,7 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  data = {'grant_type': 'client_credentials'}
  data = parse.urlencode(data).encode()
  with mock.patch(
- 'keystone.api._shared.authentication.' 'authenticate_for_token'
+ 'keystone.api._shared.authentication.authenticate_for_token'
  ) as co_mock:
  co_mock.side_effect = exception.ValidationError(
  'Auth method is invalid'
@@ -592,7 +593,7 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  data = {'grant_type': 'client_credentials'}
  data = parse.urlencode(data).encode()
  with mock.patch(
- 'keystone.api._shared.authentication.' 'authenticate_for_token'
+ 'keystone.api._shared.authentication.authenticate_for_token'
  ) as co_mock:
  co_mock.side_effect = exception.UnexpectedError(
  'unexpected error.'
@@ -627,7 +628,7 @@ class OAuth2SecretBasicTests(test_v3.OAuth2RestfulTestCase):
  data = {'grant_type': 'client_credentials'}
  data = parse.urlencode(data).encode()
  with mock.patch(
- 'keystone.api._shared.authentication.' 'authenticate_for_token'
+ 'keystone.api._shared.authentication.authenticate_for_token'
  ) as co_mock:
  co_mock.side_effect = Exception('Internal server is invalid')
  resp = self.post(
@@ -1206,7 +1207,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -1948,7 +1949,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -1976,7 +1977,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -2004,7 +2005,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -2032,7 +2033,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -2060,7 +2061,7 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  )
  self.assertUnauthorizedResp(resp)
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -2156,6 +2157,6 @@ class OAuth2CertificateTests(test_v3.OAuth2RestfulTestCase):
  self.assertEqual('other_error', json_resp['error'])
  self.assertEqual(err_msg, json_resp['error_description'])
  self.assertIn(
- 'Get OAuth2.0 Access Token API: ' 'mapping rule process failed.',
+ 'Get OAuth2.0 Access Token API: mapping rule process failed.',
  self.log_fix.output,
  )
@@ -30,7 +30,7 @@ from keystone.tests import unit
  v3_MEDIA_TYPES = [
  {
  "base": "application/json",
- "type": "application/" "vnd.openstack.identity-v3+json",
+ "type": "application/vnd.openstack.identity-v3+json",
  }
  ]
@@ -171,7 +171,7 @@ FEDERATED_IDP_SPECIFIC_WEBSSO = (
  )

  APPLICATION_CREDENTIAL = (
- '/users/{user_id}/application_credentials/' '{application_credential_id}'
+ '/users/{user_id}/application_credentials/{application_credential_id}'
  )
  APPLICATION_CREDENTIALS = '/users/{user_id}/application_credentials'
  APPLICATION_CREDENTIAL_RELATION = json_home.build_v3_parameter_relation(
@@ -665,7 +665,7 @@ V3_JSON_HOME_RESOURCES = {
  },
  _build_ep_filter_rel(resource_name='project_endpoint'): {
  'href-template': (
- '/OS-EP-FILTER/projects/{project_id}' '/endpoints/{endpoint_id}'
+ '/OS-EP-FILTER/projects/{project_id}/endpoints/{endpoint_id}'
  ),
  'href-vars': {
  'endpoint_id': json_home.Parameters.ENDPOINT_ID,
@@ -65,7 +65,7 @@ class TestOverrideSkipping(unit.BaseTestCase):
  observed_error = result.decorated.errors[0]
  observed_error_msg = observed_error[1]
  expected_error_msg = (
- "'test_not_in_parent' is not a previously " "defined test method"
+ "'test_not_in_parent' is not a previously defined test method"
  )
  self.assertIn(expected_error_msg, observed_error_msg)